
add 'with_all_loss_functions' to 'run_with'

Bas Nijholt authored on 23/11/2018 11:41:52
Showing 1 changed file
@@ -139,14 +139,14 @@ def add_loss_to_params(learner_type, existing_params):
     return [dict(**existing_params, **lp) for lp in loss_params]
 
 
-def run_with(*learner_types):
+def run_with(*learner_types, with_all_loss_functions=True):
     pars = []
     for l in learner_types:
         has_marker = isinstance(l, tuple)
         if has_marker:
             marker, l = l
         for f, k in learner_function_combos[l]:
-            ks = add_loss_to_params(l, k)
+            ks = add_loss_to_params(l, k) if with_all_loss_functions else [k]
             for k in ks:
                 # Check if learner was marked with our `xfail` decorator
                 # XXX: doesn't work when feeding kwargs to xfail.
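
For context, a minimal self-contained sketch of the parametrization pattern this hunk changes. `learner_function_combos` and `add_loss_to_params` below are illustrative stand-ins (their real bodies live elsewhere in test_learners.py); only the control flow of `run_with` mirrors the diff:

import pytest

# Stand-in table: one (function, kwargs) combo per learner type.
learner_function_combos = {'Learner1D': [(lambda x: x**2, {'bounds': (-1, 1)})]}

def add_loss_to_params(learner_type, existing_params):
    # Stand-in: expand one kwargs dict into one copy per loss function.
    loss_params = [{'loss_per_interval': n} for n in ('default', 'curvature')]
    return [dict(**existing_params, **lp) for lp in loss_params]

def run_with(*learner_types, with_all_loss_functions=True):
    pars = []
    for l in learner_types:
        for f, k in learner_function_combos[l]:
            # The new keyword: when False, keep only the original kwargs
            # instead of one copy per loss function.
            ks = add_loss_to_params(l, k) if with_all_loss_functions else [k]
            for k in ks:
                pars.append((l, f, dict(k)))
    return pytest.mark.parametrize('learner_type, f, learner_kwargs', pars)

With these stand-ins, with_all_loss_functions=True yields two test cases per combo and False yields one.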
@@ -402,7 +402,8 @@ def test_learner_performance_is_invariant_under_scaling(learner_type, f, learner
     assert abs(learner.loss() - control.loss()) / learner.loss() < 1e-11
 
 
-@run_with(Learner1D, Learner2D, LearnerND, AverageLearner)
+@run_with(Learner1D, Learner2D, LearnerND, AverageLearner,
+    with_all_loss_functions=False)
 def test_balancing_learner(learner_type, f, learner_kwargs):
     """Test if the BalancingLearner works with the different types of learners."""
     learners = [learner_type(generate_random_parametrization(f), **learner_kwargs)
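
A hedged sketch of the construction this test exercises, using adaptive's public API (Learner1D, BalancingLearner, runner.simple); the function and goal here are made up for illustration:

import adaptive

learners = [adaptive.Learner1D(lambda x, m=m: m * x**2, bounds=(-1, 1))
            for m in (1, 2, 3)]
bl = adaptive.BalancingLearner(learners)
# The balancing learner dispatches each new point to whichever child
# currently has the largest loss; runner.simple blocks until the goal holds.
adaptive.runner.simple(bl, goal=lambda l: l.loss() < 0.1)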
@@ -436,7 +437,8 @@ def test_balancing_learner(learner_type, f, learner_kwargs):
 
 
 @run_with(Learner1D, Learner2D, LearnerND, AverageLearner,
-    maybe_skip(SKOptLearner), IntegratorLearner)
+    maybe_skip(SKOptLearner), IntegratorLearner,
+    with_all_loss_functions=False)
 def test_saving(learner_type, f, learner_kwargs):
     f = generate_random_parametrization(f)
     learner = learner_type(f, **learner_kwargs)
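
A sketch of the save/load round trip that test_saving presumably checks, assuming the learner.save(fname) / learner.load(fname) API; the temp-file handling is illustrative:

import os
import tempfile

import adaptive

learner = adaptive.Learner1D(lambda x: x**2, bounds=(-1, 1))
control = adaptive.Learner1D(lambda x: x**2, bounds=(-1, 1))
adaptive.runner.simple(learner, goal=lambda l: l.npoints > 10)

fd, fname = tempfile.mkstemp()
os.close(fd)
try:
    learner.save(fname)    # persist the learner's data to disk
    control.load(fname)    # restore it into a fresh learner
    assert learner.data == control.data
finally:
    os.remove(fname)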
@@ -457,7 +459,8 @@ def test_saving(learner_type, f, learner_kwargs):
 
 
 @run_with(Learner1D, Learner2D, LearnerND, AverageLearner,
-    maybe_skip(SKOptLearner), IntegratorLearner)
+    maybe_skip(SKOptLearner), IntegratorLearner,
+    with_all_loss_functions=False)
 def test_saving_of_balancing_learner(learner_type, f, learner_kwargs):
     f = generate_random_parametrization(f)
     learner = BalancingLearner([learner_type(f, **learner_kwargs)])
@@ -483,7 +486,8 @@ def test_saving_of_balancing_learner(learner_type, f, learner_kwargs):
 
 
 @run_with(Learner1D, Learner2D, LearnerND, AverageLearner,
-    maybe_skip(SKOptLearner), IntegratorLearner)
+    maybe_skip(SKOptLearner), IntegratorLearner,
+    with_all_loss_functions=False)
 def test_saving_with_datasaver(learner_type, f, learner_kwargs):
     f = generate_random_parametrization(f)
     g = lambda x: {'y': f(x), 't': random.random()}
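
The DataSaver case differs in that the learned function returns a dict, and arg_picker tells the wrapped learner which entry to learn. A sketch, assuming the adaptive.DataSaver(learner, arg_picker) API; the function g and the goal are illustrative:

import operator

import adaptive

def g(x):
    # Return the learned value plus extra metadata, as in this test.
    return {'y': x**2, 't': 0.0}

learner = adaptive.DataSaver(adaptive.Learner1D(g, bounds=(-1, 1)),
                             arg_picker=operator.itemgetter('y'))
adaptive.runner.simple(learner, goal=lambda l: l.learner.npoints > 10)
# learner.extra_data maps each evaluated point to the full dict from g.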