
remove check for infinite loss when there is no data

This should emphatically *not* be the case for the 1D and
2D learners, as the loss enforces an almost-uniform sampling
of the space in the absence of data.

Joseph Weston authored on 16/11/2017 11:53:49 • Bas Nijholt committed on 20/11/2017 15:49:57
Showing 1 changed file
@@ -201,10 +201,6 @@ def test_expected_loss_improvement_is_less_than_total_loss(learner_type, f, lear
     N = random.randint(50, 100)
     xs, loss_improvements = learner.choose_points(N)
 
-    # no data -- loss is infinite
-    assert all(l == float('inf') for l in loss_improvements)
-    assert learner.loss() == float('inf')
-
     for x in xs:
         learner.add_point(x, f(x))