The default loss functions will now become public API (in case
anyone wants to use them in their own loss functions) and the
non-standard loss functions will be accessed as
`adaptive.learner.learner2D.uniform_loss` etc.
... | ... |
@@ -2,7 +2,7 @@ |
2 | 2 |
from .average_learner import AverageLearner |
3 | 3 |
from .base_learner import BaseLearner |
4 | 4 |
from .balancing_learner import BalancingLearner |
5 |
-from .learner1D import Learner1D, uniform_sampling_1d |
|
6 |
-from .learner2D import Learner2D, uniform_sampling_2d |
|
5 |
+from .learner1D import Learner1D |
|
6 |
+from .learner2D import Learner2D |
|
7 | 7 |
from .integrator_learner import IntegratorLearner |
8 | 8 |
from .data_saver import DataSaver |
... | ... |
@@ -11,7 +11,7 @@ import scipy.interpolate |
11 | 11 |
from ..notebook_integration import ensure_holoviews |
12 | 12 |
from .base_learner import BaseLearner |
13 | 13 |
|
14 |
-def uniform_sampling_1d(interval, scale, function_values): |
|
14 |
+def uniform_loss(interval, scale, function_values): |
|
15 | 15 |
"""Loss function that samples the domain uniformly. |
16 | 16 |
|
17 | 17 |
Works with `~adaptive.Learner1D` only. |
... | ... |
@@ -32,7 +32,7 @@ def uniform_sampling_1d(interval, scale, function_values): |
32 | 32 |
return dx |
33 | 33 |
|
34 | 34 |
|
35 |
-def _default_loss_per_interval(interval, scale, function_values): |
|
35 |
+def default_loss(interval, scale, function_values): |
|
36 | 36 |
"""Calculate loss on a single interval |
37 | 37 |
|
38 | 38 |
Currently returns the rescaled length of the interval. If one of the |
... | ... |
@@ -87,7 +87,7 @@ class Learner1D(BaseLearner): |
87 | 87 |
|
88 | 88 |
def __init__(self, function, bounds, loss_per_interval=None): |
89 | 89 |
self.function = function |
90 |
- self.loss_per_interval = loss_per_interval or _default_loss_per_interval |
|
90 |
+ self.loss_per_interval = loss_per_interval or default_loss |
|
91 | 91 |
|
92 | 92 |
# A dict storing the loss function for each interval x_n. |
93 | 93 |
self.losses = {} |
... | ... |
@@ -42,7 +42,7 @@ def areas(ip): |
42 | 42 |
return areas |
43 | 43 |
|
44 | 44 |
|
45 |
-def uniform_sampling_2d(ip): |
|
45 |
+def uniform_loss(ip): |
|
46 | 46 |
"""Loss function that samples the domain uniformly. |
47 | 47 |
|
48 | 48 |
Works with `~adaptive.Learner2D` only. |
... | ... |
@@ -86,7 +86,7 @@ def resolution_loss(ip, min_distance=0, max_distance=1): |
86 | 86 |
A = areas(ip) |
87 | 87 |
dev = np.sum(deviations(ip), axis=0) |
88 | 88 |
|
89 |
- # similar to the _default_loss_per_triangle |
|
89 |
+ # similar to the default_loss |
|
90 | 90 |
loss = np.sqrt(A) * dev + A |
91 | 91 |
|
92 | 92 |
# Setting areas with a small area to zero such that they won't be chosen again |
... | ... |
@@ -99,7 +99,7 @@ def resolution_loss(ip, min_distance=0, max_distance=1): |
99 | 99 |
return loss |
100 | 100 |
|
101 | 101 |
|
102 |
-def _default_loss_per_triangle(ip): |
|
102 |
+def default_loss(ip): |
|
103 | 103 |
dev = np.sum(deviations(ip), axis=0) |
104 | 104 |
A = areas(ip) |
105 | 105 |
losses = dev * np.sqrt(A) + 0.1 * A |
... | ... |
@@ -207,7 +207,7 @@ class Learner2D(BaseLearner): |
207 | 207 |
def __init__(self, function, bounds, loss_per_triangle=None): |
208 | 208 |
self.ndim = len(bounds) |
209 | 209 |
self._vdim = None |
210 |
- self.loss_per_triangle = loss_per_triangle or _default_loss_per_triangle |
|
210 |
+ self.loss_per_triangle = loss_per_triangle or default_loss |
|
211 | 211 |
self.bounds = tuple((float(a), float(b)) for a, b in bounds) |
212 | 212 |
self.data = OrderedDict() |
213 | 213 |
self._stack = OrderedDict() |