@@ -7,6 +7,7 @@ import functools as ft
 import random
 import math
 import numpy as np
+import scipy.spatial
 
 import pytest
 
@@ -87,8 +88,22 @@ def run_with(*learner_types):
     )
 
 
-@run_with(Learner1D, Learner2D)
-def test_uniform_sampling(learner_type, f, learner_kwargs):
+def choose_points_randomly(learner, rounds, points):
+    n_rounds = random.randrange(*rounds)
+    n_points = [random.randrange(*points) for _ in range(n_rounds)]
+
+    xs = []
+    ls = []
+    for n in n_points:
+        x, l = learner.choose_points(n)
+        xs.extend(x)
+        ls.extend(l)
+
+    return xs, ls
+
+
+@run_with(Learner1D)
+def test_uniform_sampling1D(learner_type, f, learner_kwargs):
     """Points are sampled uniformly if no data is provided.
 
     Non-uniform sampling implies that we think we know something about
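Aside (illustration, not part of the diff): the 1D assertion in the next hunk, max(ivals) / min(ivals) < 2 + 1e-8, rests on a simple invariant: a sampler that always bisects its currently widest interval can never produce adjacent gaps differing in length by more than a factor of two. A minimal sketch of that invariant, with a hypothetical BisectingSampler standing in for Learner1D (its choose_points is simplified to return the full sorted sample rather than adaptive's (points, loss_improvements) pair):

import numpy as np

class BisectingSampler:
    # Hypothetical stand-in for Learner1D: with no data, always bisect
    # the widest interval between the points sampled so far.
    def __init__(self, bounds):
        self.xs = sorted(bounds)

    def choose_points(self, n):
        for _ in range(n):
            gaps = np.diff(self.xs)
            i = int(np.argmax(gaps))  # index of the widest interval
            self.xs.insert(i + 1, (self.xs[i] + self.xs[i + 1]) / 2)
        return list(self.xs)

sampler = BisectingSampler((-1.0, 1.0))
xs = sampler.choose_points(137)            # any number of requests works
ivals = np.diff(xs)
assert max(ivals) / min(ivals) < 2 + 1e-8  # the bound used by the test

By induction the gap lengths take at most two distinct values, a and a/2, at any moment, which is exactly why the test tolerates a ratio of 2 (plus a float tolerance) but nothing larger.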
@@ -97,20 +112,35 @@ def test_uniform_sampling(learner_type, f, learner_kwargs):
     f = generate_random_parametrization(f)
     learner = learner_type(f, **learner_kwargs)
 
-    n_rounds = random.randrange(70, 100)
-    n_points = [random.randrange(10, 20) for _ in range(n_rounds)]
+    points, _ = choose_points_randomly(learner, (10, 20), (10, 20))
 
-    xs = []
-    for n in n_points:
-        x, _ = learner.choose_points(n)
-        xs.extend(x)
+    points.sort()
+    ivals = np.diff(points)
+    assert max(ivals) / min(ivals) < 2 + 1e-8
+
+
+@run_with(Learner2D)
+def test_uniform_sampling2D(learner_type, f, learner_kwargs):
+    """Points are sampled uniformly if no data is provided.
+
+    Non-uniform sampling implies that we think we know something about
+    the function, which we do not in the absence of data.
+    """
+    f = generate_random_parametrization(f)
+    learner = learner_type(f, **learner_kwargs)
+
+    points, _ = choose_points_randomly(learner, (70, 100), (10, 20))
+    tree = scipy.spatial.cKDTree(points)
+
+    # regular grid
+    n = math.sqrt(len(points))
+    xbounds, ybounds = learner_kwargs['bounds']
+    r = math.sqrt((ybounds[1] - ybounds[0]) / (xbounds[1] - xbounds[0]))
+    xs, dx = np.linspace(*xbounds, int(n / r), retstep=True)
+    ys, dy = np.linspace(*ybounds, int(n * r), retstep=True)
 
-    if learner_type is Learner1D:
-        xs.sort()
-        ivals = np.diff(sorted(xs))
-        assert max(ivals) / min(ivals) < 2 + 1e-8
-    else:
-        raise RuntimeError('No test for {}'.format(learner_type))
+    distances, neighbors = tree.query(list(it.product(xs, ys)), k=1)
+    assert max(distances) < math.sqrt(dx**2 + dy**2)
 
 
 @run_with(Learner1D, Learner2D)
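Aside (illustration, not part of the diff): the 2D criterion can be exercised in isolation. The check is purely geometric: lay an aspect-ratio-matched reference grid with roughly as many nodes as there are samples across the bounds; if the samples cover the bounds uniformly, every grid node has a sample within one cell diagonal, sqrt(dx**2 + dy**2). A self-contained sketch, in which the is_roughly_uniform helper and both synthetic point sets are assumptions for illustration, not anything produced by Learner2D:

import itertools as it
import math
import numpy as np
import scipy.spatial

def is_roughly_uniform(points, bounds):
    # Same criterion as test_uniform_sampling2D, factored into a helper.
    (xmin, xmax), (ymin, ymax) = bounds
    tree = scipy.spatial.cKDTree(points)
    n = math.sqrt(len(points))
    r = math.sqrt((ymax - ymin) / (xmax - xmin))
    xs, dx = np.linspace(xmin, xmax, int(n / r), retstep=True)
    ys, dy = np.linspace(ymin, ymax, int(n * r), retstep=True)
    distances, _ = tree.query(list(it.product(xs, ys)), k=1)
    return max(distances) < math.sqrt(dx**2 + dy**2)

bounds = ((-1, 1), (-2, 2))
grid = list(it.product(np.linspace(-1, 1, 30), np.linspace(-2, 2, 40)))
assert is_roughly_uniform(grid, bounds)              # a regular grid passes

clustered = [(x / 1000, y / 1000) for x, y in grid]  # everything near (0, 0)
assert not is_roughly_uniform(clustered, bounds)     # clustering is rejected

The cell-diagonal bound is deliberately loose: it tolerates jitter of up to about one grid cell, but still rejects any point set that leaves part of the bounds empty, as the clustered example shows.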