... | ... |
@@ -6,6 +6,7 @@ import itertools as it |
6 | 6 |
import functools as ft |
7 | 7 |
import random |
8 | 8 |
import math |
9 |
+import numpy as np |
|
9 | 10 |
|
10 | 11 |
import pytest |
11 | 12 |
|
... | ... |
@@ -86,15 +87,30 @@ def run_with(*learner_types): |
86 | 87 |
) |
87 | 88 |
|
88 | 89 |
|
89 |
@run_with(Learner1D, Learner2D)
def test_uniform_sampling(learner_type, f, learner_kwargs):
    """Points are sampled uniformly if no data is provided.

    Non-uniform sampling implies that we think we know something about
    the function, which we do not in the absence of data.

    Strategy: ask the learner for several batches of points without ever
    feeding any data back, then check that the chosen points are evenly
    spread over the domain.
    """
    # Wrap 'f' so each run exercises a randomly parametrized instance.
    f = generate_random_parametrization(f)
    learner = learner_type(f, **learner_kwargs)

    # Request a random number of rounds, each with a random batch size,
    # so the test is not tied to one particular sampling schedule.
    n_rounds = random.randrange(70, 100)
    n_points = [random.randrange(10, 20) for _ in range(n_rounds)]

    xs = []
    for n in n_points:
        x, _ = learner.choose_points(n)
        xs.extend(x)

    if learner_type is Learner1D:
        # Sort once; the original sorted in place AND re-sorted via
        # sorted(xs) inside np.diff, which was redundant.
        xs.sort()
        ivals = np.diff(xs)
        # Uniform sampling by bisection means adjacent intervals can
        # differ by at most a factor of 2 (plus float tolerance).
        assert max(ivals) / min(ivals) < 2 + 1e-8
    else:
        # No uniformity check implemented for this learner type yet.
        raise RuntimeError('No test for {}'.format(learner_type))
|
98 | 114 |
|
99 | 115 |
|
100 | 116 |
@run_with(Learner1D, Learner2D) |