GitHub Repository: rlabbe/Kalman-and-Bayesian-Filters-in-Python
Path: blob/master/experiments/RobotLocalizationParticleFilter.py
# -*- coding: utf-8 -*-

"""Copyright 2015 Roger R Labbe Jr.

Code supporting the book

Kalman and Bayesian Filters in Python
https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python

This is licensed under an MIT license. See the LICENSE.txt file
for more information.
"""

from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import numpy as np
from numpy.random import randn, random, uniform
import scipy.stats

# matplotlib is used by test_pf() and test_pf2(); importing it here keeps
# those functions runnable outside of the __main__ block below.
import matplotlib.pyplot as plt

class RobotLocalizationParticleFilter(object):

    def __init__(self, N, x_dim, y_dim, landmarks, measure_std_error):
        self.particles = np.empty((N, 3))  # x, y, heading
        self.N = N
        self.x_dim = x_dim
        self.y_dim = y_dim
        self.landmarks = landmarks
        self.R = measure_std_error

        # distribute particles randomly with uniform weight. The in-place
        # initialization below is commented out; call create_uniform_particles()
        # or create_gaussian_particles() instead. Weights are reset on every
        # call to update().
        self.weights = np.empty(N)
        #self.weights.fill(1./N)
        '''self.particles[:, 0] = uniform(0, x_dim, size=N)
        self.particles[:, 1] = uniform(0, y_dim, size=N)
        self.particles[:, 2] = uniform(0, 2*np.pi, size=N)'''

    def create_uniform_particles(self, x_range, y_range, hdg_range):
        self.particles[:, 0] = uniform(x_range[0], x_range[1], size=self.N)
        self.particles[:, 1] = uniform(y_range[0], y_range[1], size=self.N)
        self.particles[:, 2] = uniform(hdg_range[0], hdg_range[1], size=self.N)
        self.particles[:, 2] %= 2 * np.pi

    def create_gaussian_particles(self, mean, var):
        self.particles[:, 0] = mean[0] + randn(self.N)*var[0]
        self.particles[:, 1] = mean[1] + randn(self.N)*var[1]
        self.particles[:, 2] = mean[2] + randn(self.N)*var[2]
        self.particles[:, 2] %= 2 * np.pi

    def predict(self, u, std, dt=1.):
        """ move according to control input u (heading change, velocity)
        with noise std"""

        self.particles[:, 2] += u[0] + randn(self.N) * std[0]
        self.particles[:, 2] %= 2 * np.pi

        d = u[1]*dt + randn(self.N) * std[1]
        self.particles[:, 0] += np.cos(self.particles[:, 2]) * d
        self.particles[:, 1] += np.sin(self.particles[:, 2]) * d

    def update(self, z):
        """ weight each particle by the likelihood of the measured
        distances z to each landmark"""
        self.weights.fill(1.)
        for i, landmark in enumerate(self.landmarks):
            distance = np.linalg.norm(self.particles[:, 0:2] - landmark, axis=1)
            self.weights *= scipy.stats.norm(distance, self.R).pdf(z[i])
            #self.weights *= Gaussian(distance, self.R, z[i])

        self.weights += 1.e-300            # avoid divide by zero
        self.weights /= sum(self.weights)  # normalize

    def neff(self):
        """ effective number of particles given the current weights"""
        return 1. / np.sum(np.square(self.weights))
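
    # Worked illustration (added; not in the original file): neff() measures
    # how many particles meaningfully contribute. With uniform weights it
    # equals N; with all weight on one particle it collapses to 1. For N = 4:
    #   weights = [0.25, 0.25, 0.25, 0.25]  ->  1 / sum(w**2) = 4.0
    #   weights = [1.00, 0.00, 0.00, 0.00]  ->  1 / sum(w**2) = 1.0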

    def resample(self):
        """ multinomial resampling: draw N new particles in proportion
        to their weights"""
        cumulative_sum = np.cumsum(self.weights)
        cumulative_sum[-1] = 1.  # avoid round-off error
        indexes = np.searchsorted(cumulative_sum, random(self.N))

        # resample according to indexes
        self.particles = self.particles[indexes]
        self.weights = self.weights[indexes]
        self.weights /= np.sum(self.weights)  # normalize

    def resample_from_index(self, indexes):
        assert len(indexes) == self.N

        self.particles = self.particles[indexes]
        self.weights = self.weights[indexes]
        self.weights /= np.sum(self.weights)

    def estimate(self):
        """ returns mean and variance """
        pos = self.particles[:, 0:2]
        mu = np.average(pos, weights=self.weights, axis=0)
        var = np.average((pos - mu)**2, weights=self.weights, axis=0)

        return mu, var

    def mean(self):
        """ returns weighted mean position"""
        return np.average(self.particles[:, 0:2], weights=self.weights, axis=0)
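

# Minimal usage sketch (added for illustration; not part of the original file).
# It runs the class above through a typical predict/update/resample loop,
# resampling only when the effective sample size neff() drops below N/2, a
# common heuristic. The landmark positions, noise levels, and control input
# are made-up values.
def _run_filter_sketch():
    landmarks = np.array([[-1., 2.], [5., 10.], [12., 14.]])
    N = 1000
    pf = RobotLocalizationParticleFilter(N, 20, 20, landmarks, 0.1)
    pf.create_uniform_particles((0, 20), (0, 20), (0, 2*np.pi))

    pos = np.array([2., 2.])
    for _ in range(10):
        pos = pos + (1., 1.)  # simulated robot moves diagonally
        # noisy range measurement to each landmark
        zs = np.linalg.norm(landmarks - pos, axis=1) + randn(len(landmarks))*0.1
        pf.predict(u=(0., 1.414), std=(.2, .05))
        pf.update(z=zs)
        if pf.neff() < N / 2:  # resample only when the weights degenerate
            pf.resample()
        mu, var = pf.estimate()
        print(mu, var)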


def residual_resample(w):

    N = len(w)

    w_ints = np.floor(N*w).astype(int)
    # residual (fractional) part of N*w, normalized into a distribution
    residual = N*w - w_ints
    residual /= sum(residual)

    indexes = np.zeros(N, 'i')
    k = 0
    for i in range(N):
        for j in range(w_ints[i]):
            indexes[k] = i
            k += 1
    cumsum = np.cumsum(residual)
    cumsum[N-1] = 1.
    for j in range(k, N):
        indexes[j] = np.searchsorted(cumsum, random())

    return indexes


def residual_resample2(w):

    N = len(w)

    w_ints = np.floor(N*w).astype(int)

    R = np.sum(w_ints)
    m_rdn = N - R

    Ws = (N*w - w_ints) / m_rdn
    indexes = np.zeros(N, 'i')
    i = 0
    for j in range(N):
        for k in range(w_ints[j]):
            indexes[i] = j
            i += 1
    cumsum = np.cumsum(Ws)
    cumsum[N-1] = 1  # just in case

    for j in range(i, N):
        indexes[j] = np.searchsorted(cumsum, random())
    return indexes


def systemic_resample(w):
    """ performs systematic resampling on the weights w"""
    N = len(w)
    Q = np.cumsum(w)
    Q[-1] = 1.  # guard against round-off error in the cumulative sum
    indexes = np.zeros(N, 'int')
    t = np.linspace(0, 1-1/N, N) + random()/N

    i, j = 0, 0
    while i < N and j < N:
        while Q[j] < t[i]:
            j += 1
        indexes[i] = j
        i += 1

    return indexes
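

# Illustrative check (added; not in the original file): compare how the three
# resamplers above distribute indexes for a small, hand-picked weight vector.
# The draws are random, but indexes with larger weights should be selected
# more often by every scheme.
def _demo_resamplers():
    w = np.array([0.05, 0.15, 0.30, 0.50])
    for resampler in (residual_resample, residual_resample2, systemic_resample):
        counts = np.bincount(resampler(w), minlength=len(w))
        print(resampler.__name__, counts)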


def Gaussian(mu, sigma, x):

    # probability of x for a 1-dim Gaussian with mean mu and std dev sigma
    g = (np.exp(-((mu - x) ** 2) / (sigma ** 2) / 2.0) /
         np.sqrt(2.0 * np.pi * (sigma ** 2)))
    for i in range(len(g)):
        g[i] = max(g[i], 1.e-229)  # floor to avoid underflow to zero
    return g
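

# Quick sanity check (added for illustration; not in the original file): the
# hand-rolled Gaussian() above should agree with scipy.stats.norm(mu, sigma),
# which is what update() actually uses. The values here are arbitrary.
def _check_gaussian():
    mu = np.array([1.0, 2.0, 3.0])
    sigma = 0.5
    x = 2.2
    assert np.allclose(Gaussian(mu, sigma, x),
                       scipy.stats.norm(mu, sigma).pdf(x))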


def test_pf():
    # NOTE: plot_pf() is a plotting helper that is not defined in this file;
    # it must be provided elsewhere for this test to run.

    #seed(1234)
    N = 10000
    R = .2
    landmarks = [[-1, 2], [20, 4], [10, 30], [18, 25]]
    #landmarks = [[-1, 2], [2,4]]

    pf = RobotLocalizationParticleFilter(N, 20, 20, landmarks, R)
    # the constructor does not distribute the particles, so do it here
    pf.create_uniform_particles((0, 20), (0, 20), (0, 2*np.pi))

    plot_pf(pf, 20, 20, weights=False)

    dt = .01
    plt.pause(dt)

    for x in range(18):

        zs = []
        pos = (x+3, x+3)

        for landmark in landmarks:
            d = np.sqrt((landmark[0]-pos[0])**2 + (landmark[1]-pos[1])**2)
            zs.append(d + randn()*R)

        pf.predict((0.01, 1.414), (.2, .05))
        pf.update(z=zs)
        pf.resample()
        #print(x, np.array(list(zip(pf.particles, pf.weights))))

        mu, var = pf.estimate()
        plot_pf(pf, 20, 20, weights=False)
        plt.plot(pos[0], pos[1], marker='*', color='r', ms=10)
        plt.scatter(mu[0], mu[1], color='g', s=100)
        plt.tight_layout()
        plt.pause(dt)


def test_pf2():
    N = 1000
    sensor_std_err = .2
    landmarks = [[-1, 2], [20, 4], [-20, 6], [18, 25]]

    pf = RobotLocalizationParticleFilter(N, 20, 20, landmarks, sensor_std_err)
    # the constructor does not distribute the particles, so do it here
    pf.create_uniform_particles((0, 20), (0, 20), (0, 2*np.pi))

    xs = []
    for x in range(18):
        zs = []
        pos = (x+1, x+1)

        for landmark in landmarks:
            d = np.sqrt((landmark[0]-pos[0])**2 + (landmark[1]-pos[1])**2)
            zs.append(d + randn()*sensor_std_err)

        # move diagonally forward to (x+1, x+1)
        pf.predict((0.00, 1.414), (.2, .05))
        pf.update(z=zs)
        pf.resample()

        mu, var = pf.estimate()
        xs.append(mu)

    xs = np.array(xs)
    plt.plot(xs[:, 0], xs[:, 1])
    plt.show()


if __name__ == '__main__':

    DO_PLOT_PARTICLES = False
    from numpy.random import seed
    import matplotlib.pyplot as plt

    #plt.figure()

    seed(5)
    for count in range(10):
        print()
        print(count)
        #numpy.random.set_state(fail_state)
        #if count == 12:
        #    #fail_state = numpy.random.get_state()
        #    DO_PLOT_PARTICLES = True

        N = 4000
        sensor_std_err = .1
        landmarks = np.array([[-1, 2], [2, 4], [10, 6], [18, 25]])
        NL = len(landmarks)

        #landmarks = [[-1, 2], [2,4]]

        pf = RobotLocalizationParticleFilter(N, 20, 20, landmarks, sensor_std_err)
        #pf.create_gaussian_particles([3, 2, 0], [5, 5, 2])
        pf.create_uniform_particles((0, 20), (0, 20), (0, 6.28))

        if DO_PLOT_PARTICLES:
            plt.scatter(pf.particles[:, 0], pf.particles[:, 1], alpha=.2, color='g')

        xs = []
        for x in range(18):
            pos = (x+1, x+1)

            # noisy range measurement to each landmark (vectorized)
            zs = np.linalg.norm(landmarks - pos, axis=1) + randn(NL)*sensor_std_err

            # move diagonally forward to (x+1, x+1)
            pf.predict((0.00, 1.414), (.2, .05))

            pf.update(z=zs)
            if x == 0:
                print(max(pf.weights))
            #while abs(pf.neff() - N) < .1:
            #    print('neffing')
            #    pf.create_uniform_particles((0,20), (0,20), (0, 6.28))
            #    pf.update(z=zs)
            #print(pf.neff())

            #indexes = residual_resample2(pf.weights)
            indexes = systemic_resample(pf.weights)

            pf.resample_from_index(indexes)
            #pf.resample()

            mu, var = pf.estimate()
            xs.append(mu)
            if DO_PLOT_PARTICLES:
                plt.scatter(pf.particles[:, 0], pf.particles[:, 1], alpha=.2)
                plt.scatter(pos[0], pos[1], marker='*', color='r')
                plt.scatter(mu[0], mu[1], marker='s', color='r')
                plt.pause(.01)

        xs = np.array(xs)
        plt.plot(xs[:, 0], xs[:, 1])
        plt.show()