Implement Anton's AverageLearner

Bas Nijholt authored on 21/08/2017 11:53:15
Showing 2 changed files
@@ -14,7 +14,6 @@
    "outputs": [],
    "source": [
     "import adaptive\n",
-    "import numpy as np\n",
     "adaptive.notebook_extension()\n",
     "\n",
     "def func(x, wait=True):\n",
@@ -26,7 +25,7 @@
     "    x = np.asarray(x)\n",
     "    a = 0.001\n",
     "    if wait:\n",
-    "        sleep(np.random.randint(1, 3))\n",
+    "        sleep(np.random.randint(0, 2) / 10)\n",
     "    return x + a**2/(a**2 + (x)**2)"
    ]
   },
@@ -39,22 +38,33 @@
     "## Local Process Pool (default)"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "learner = adaptive.learner.Learner1D(func, bounds=(-1.01, 1.0))\n",
+    "runner = adaptive.Runner(learner, goal=lambda l: l.loss(real=True) < 0.01)\n",
+    "adaptive.live_plot(runner)"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
-    "import concurrent.futures as concurrent\n",
-    "# Initialize the learner\n",
-    "learner = adaptive.learner.Learner1D(func, bounds=(-1, 1))\n",
-    "\n",
-    "# Next two lines not needed, but just as example that it still works\n",
-    "learner.add_point(-1, func(-1))\n",
-    "learner.add_point(1, func(1))\n",
+    "# Same function evaluated on homogeneous grid with same amount of points\n",
+    "from functools import partial\n",
+    "import numpy as np\n",
     "\n",
-    "runner = adaptive.Runner(learner, concurrent.ProcessPoolExecutor(48), goal=lambda l: l.loss(real=True) < 0.01)\n",
-    "adaptive.live_plot(runner)"
+    "learner2 = adaptive.learner.Learner1D(func, bounds=(-1.01, 1.0))\n",
+    "xs = np.linspace(-1.01, 1.0, len(learner.data))\n",
+    "learner2.add_data(xs, map(partial(func, wait=False), xs))\n",
+    "learner2.plot()"
    ]
   },
   {
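For readability, the two notebook cells touched above amount to roughly the following plain Python, a sketch that is not part of the commit; it assumes the `func(x, wait=True)` defined at the top of the notebook and the adaptive API as of this commit:

import numpy as np
from functools import partial
import adaptive

# Adaptively sample func until the real loss drops below 0.01.
learner = adaptive.learner.Learner1D(func, bounds=(-1.01, 1.0))
runner = adaptive.Runner(learner, goal=lambda l: l.loss(real=True) < 0.01)
adaptive.live_plot(runner)

# Same function on a homogeneous grid with the same number of points,
# to compare against the adaptively chosen points.
learner2 = adaptive.learner.Learner1D(func, bounds=(-1.01, 1.0))
xs = np.linspace(-1.01, 1.0, len(learner.data))
learner2.add_data(xs, map(partial(func, wait=False), xs))
learner2.plot()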
@@ -83,6 +93,32 @@
     "runner = adaptive.Runner(learner, client, goal=lambda l: l.loss() < 0.1)\n",
     "adaptive.live_plot(runner)"
    ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 0D Learner"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "def func(x):\n",
+    "    import random\n",
+    "    import numpy as np\n",
+    "    from time import sleep\n",
+    "    sleep(np.random.randint(0, 2))\n",
+    "    return random.gauss(0.05, 1)\n",
+    "\n",
+    "learner = adaptive.learner.AverageLearner(func, None, 0.1)\n",
+    "runner = adaptive.Runner(learner, goal=lambda l: l.loss() < 1)"
+   ]
   }
  ],
  "metadata": {
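The new "0D Learner" cell, unescaped into plain Python for readability (a sketch; the positional arguments `None, 0.1` correspond to atol=None, rtol=0.1 of the AverageLearner added in the second file below):

import adaptive

def func(x):
    # x only labels the random sample; the learner averages the returned values.
    import random
    import numpy as np
    from time import sleep
    sleep(np.random.randint(0, 2))
    return random.gauss(0.05, 1)

learner = adaptive.learner.AverageLearner(func, None, 0.1)   # atol=None, rtol=0.1
runner = adaptive.Runner(learner, goal=lambda l: l.loss() < 1)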
@@ -49,9 +49,10 @@ class BaseLearner(metaclass=abc.ABCMeta):
         """Add a single datapoint to the learner."""
         self.data[x] = y

+    @abc.abstractmethod
     def remove_unfinished(self):
         """Remove uncomputed data from the learner."""
-        self.data = {k: v for k, v in self.data.items() if v is not None}
+        pass

     @abc.abstractmethod
     def loss(self, real=True):
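With remove_unfinished made abstract and emptied out in BaseLearner, every concrete learner now supplies its own implementation; the dict comprehension that used to live here moves into Learner1D in the last hunk below. A minimal hypothetical subclass, for illustration only (it would also have to implement the other abstract methods such as loss):

class MyLearner(BaseLearner):   # hypothetical, not part of this commit
    def remove_unfinished(self):
        """Remove uncomputed data from the learner."""
        # Drop points that were requested but never computed.
        self.data = {k: v for k, v in self.data.items() if v is not None}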
@@ -59,8 +60,8 @@

         Parameters
         ----------
-        expected : bool, default: False
-            If True, return the "expected" loss, i.e. the
+        real : bool, default: True
+            If False, return the "expected" loss, i.e. the
             loss including the as-yet unevaluated points
             (possibly by interpolation).
         """
@@ -95,6 +96,72 @@
             The number of points to choose.
         """

+class AverageLearner(BaseLearner):
+    def __init__(self, function, atol=None, rtol=None):
+        """A naive implementation of adaptive computing of averages.
+
+        The learned function must depend on an integer input variable that
+        represents the source of randomness.
+
+        Parameters:
+        -----------
+        atol : float
+            Desired absolute tolerance
+        rtol : float
+            Desired relative tolerance
+        """
+        super().__init__(function)
+
+        if atol is None and rtol is None:
+            raise Exception('At least one of `atol` and `rtol` should be set.')
+        if atol is None:
+            atol = np.inf
+        if rtol is None:
+            rtol = np.inf
+
+        self.function = function
+        self.atol = atol
+        self.rtol = rtol
+        self.n = 0
+        self.n_requested = 0
+        self.sum_f = 0
+        self.sum_f_sq = 0
+
+    def _choose_points(self, n=10):
+        return list(range(self.n_requested, self.n_requested + n))
+
+    def add_point(self, n, value):
+        super().add_point(n, value)
+        if value is None:
+            self.n_requested += 1
+            return
+        else:
+            self.n += 1
+            self.sum_f += value
+            self.sum_f_sq += value**2
+
+    @property
+    def mean(self):
+        return self.sum_f / self.n
+
+    @property
+    def std(self):
+        n = self.n
+        if n < 2:
+            return np.inf
+        return sqrt((self.sum_f_sq - n * self.mean**2) / (n - 1))
+
+    def loss(self, real=True):
+        n = self.n
+        if n < 2:
+            return np.inf
+        standard_error = self.std / sqrt(n if real else self.n_requested)
+        return max(standard_error / self.atol,
+                   standard_error / abs(self.mean) / self.rtol)
+
+    def remove_unfinished(self):
+        """Remove uncomputed data from the learner."""
+        pass

 class Learner1D(BaseLearner):
     """Learns and predicts a function 'f:ℝ → ℝ'.
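In words, AverageLearner.loss is the standard error of the mean, std/sqrt(n), measured against both tolerances: the notebook's goal loss() < 1 is reached once the standard error is below atol and below rtol * |mean| (with real=False the sample count n is replaced by n_requested, the loss expected once all requested points have arrived). A standalone sanity check of that arithmetic, not part of the commit:

from math import sqrt

def average_loss(values, atol=float('inf'), rtol=float('inf')):
    # Re-derivation of AverageLearner.loss (real=True) from a plain list of samples.
    n = len(values)
    mean = sum(values) / n
    var = sum((v - mean)**2 for v in values) / (n - 1)   # sample variance
    standard_error = sqrt(var / n)
    return max(standard_error / atol,
               standard_error / abs(mean) / rtol)

samples = [0.9, 1.1, 1.0, 0.95, 1.05]
print(average_loss(samples, rtol=0.1))   # ~0.35 < 1, so rtol=0.1 is already satisfied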
@@ -247,7 +314,7 @@ class Learner1D(BaseLearner):
         return xs

     def remove_unfinished(self):
-        super().remove_unfinished()
+        self.data = {k: v for k, v in self.data.items() if v is not None}
         # self.losses = self.real_losses
         # self.neighbors = self.real_neighbors