fix test
Aggrathon committed Apr 14, 2022
1 parent 4f67bb0 commit 6155d0c
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions tests/test_optim.py
@@ -140,31 +140,31 @@ def test_gradopt():
     X, Y = data_create(20, 5)
     alpha = np.random.normal(size=5)
     alpha2 = graduated_optimisation(alpha, X, Y, 0.1, beta=100)
-    assert loss_smooth(alpha, X, Y, 0.1, beta=100) > loss_smooth(
+    assert loss_smooth(alpha, X, Y, 0.1, beta=100) >= loss_smooth(
         alpha2, X, Y, 0.1, beta=100
     )
     alpha2 = graduated_optimisation(alpha, X, Y, 0.1, beta=100, lambda1=0.5)
-    assert loss_smooth(alpha, X, Y, 0.1, beta=100, lambda1=0.5) > loss_smooth(
+    assert loss_smooth(alpha, X, Y, 0.1, beta=100, lambda1=0.5) >= loss_smooth(
         alpha2, X, Y, 0.1, beta=100, lambda1=0.5
     )
     alpha2 = graduated_optimisation(alpha, X, Y, 0.1, beta=100, lambda2=0.5)
-    assert loss_smooth(alpha, X, Y, 0.1, beta=100, lambda2=0.5) > loss_smooth(
+    assert loss_smooth(alpha, X, Y, 0.1, beta=100, lambda2=0.5) >= loss_smooth(
         alpha2, X, Y, 0.1, beta=100, lambda2=0.5
     )
     # With weight
     w = np.random.uniform(size=20)
     alpha2 = graduated_optimisation(alpha, X, Y, 0.1, beta=100, weight=w)
-    assert loss_smooth(alpha, X, Y, 0.1, beta=100, weight=w) > loss_smooth(
+    assert loss_smooth(alpha, X, Y, 0.1, beta=100, weight=w) >= loss_smooth(
         alpha2, X, Y, 0.1, beta=100, weight=w
     )
     alpha2 = graduated_optimisation(alpha, X, Y, 0.1, beta=100, lambda1=0.5, weight=w)
-    assert loss_smooth(alpha, X, Y, 0.1, beta=100, lambda1=0.5, weight=w) > loss_smooth(
-        alpha2, X, Y, 0.1, beta=100, lambda1=0.5, weight=w
-    )
+    assert loss_smooth(
+        alpha, X, Y, 0.1, beta=100, lambda1=0.5, weight=w
+    ) >= loss_smooth(alpha2, X, Y, 0.1, beta=100, lambda1=0.5, weight=w)
     alpha2 = graduated_optimisation(alpha, X, Y, 0.1, beta=100, lambda2=0.5, weight=w)
-    assert loss_smooth(alpha, X, Y, 0.1, beta=100, lambda2=0.5, weight=w) > loss_smooth(
-        alpha2, X, Y, 0.1, beta=100, lambda2=0.5, weight=w
-    )
+    assert loss_smooth(
+        alpha, X, Y, 0.1, beta=100, lambda2=0.5, weight=w
+    ) >= loss_smooth(alpha2, X, Y, 0.1, beta=100, lambda2=0.5, weight=w)
 
 
 def test_regres():
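Note on the change: the assertions are relaxed from a strict > to >=. A reasonable reading is that graduated_optimisation can only guarantee a result that is no worse than the starting point, and when the random starting alpha is already at (or numerically indistinguishable from) an optimum the two loss_smooth values can be equal, which makes the strict comparison flaky. The sketch below illustrates that reasoning with hypothetical stand-ins; toy_loss and toy_optimise are not the project's loss_smooth and graduated_optimisation.

import numpy as np

def toy_loss(alpha, X, Y):
    # Hypothetical stand-in for loss_smooth: plain mean squared error.
    return np.mean((X @ alpha - Y) ** 2)

def toy_optimise(alpha, X, Y, lr=0.01, steps=200):
    # Hypothetical stand-in for graduated_optimisation: gradient descent that
    # keeps the best iterate seen, so the returned loss is never worse than the start.
    best, best_loss = alpha.copy(), toy_loss(alpha, X, Y)
    for _ in range(steps):
        grad = 2 * X.T @ (X @ alpha - Y) / len(Y)
        alpha = alpha - lr * grad
        loss = toy_loss(alpha, X, Y)
        if loss < best_loss:
            best, best_loss = alpha.copy(), loss
    return best

X = np.random.normal(size=(20, 5))
Y = X @ np.random.normal(size=5)
alpha = np.linalg.lstsq(X, Y, rcond=None)[0]  # start at the least-squares optimum
alpha2 = toy_optimise(alpha, X, Y)
assert toy_loss(alpha, X, Y) >= toy_loss(alpha2, X, Y)  # holds even when the losses are equal
# assert toy_loss(alpha, X, Y) > toy_loss(alpha2, X, Y)  # may fail: the losses can be equal

Because the stand-in optimiser only ever keeps a strictly better iterate, the >= comparison holds by construction, which is the same guarantee the relaxed test relies on.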
