# Path: blob/master/experiments/RobotLocalizationParticleFilter_2.py
# (scraped page metadata: 1700 views)
# -*- coding: utf-8 -*-12"""Copyright 2015 Roger R Labbe Jr.345Code supporting the book67Kalman and Bayesian Filters in Python8https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python91011This is licensed under an MIT license. See the LICENSE.txt file12for more information.13"""1415from __future__ import (absolute_import, division, print_function,16unicode_literals)1718import numpy as np1920from numpy.random import randn, random, uniform21import scipy.stats22232425def create_uniform_particles( x_range, y_range, hdg_range, N):26particles = np.empty((N, 3))27particles[:, 0] = uniform(x_range[0], x_range[1], size=N)28particles[:, 1] = uniform(y_range[0], y_range[1], size=N)29particles[:, 2] = uniform(hdg_range[0], hdg_range[1], size=N)30particles[:, 2] %= 2 * np.pi3132return particles333435def create_gaussian_particles( mean, var, N):36particles = np.empty((N, 3))37particles[:, 0] = mean[0] + randn(N)*var[0]38particles[:, 1] = mean[1] + randn(N)*var[1]39particles[:, 2] = mean[2] + randn(N)*var[2]40particles[:, 2] %= 2 * np.pi41return particles42434445def predict(particles, u, std, dt=1.):46""" move according to control input u (heading change, velocity)47with noise `std (std_heading, std`"""4849N = len(particles)5051particles[:, 2] += u[0] + randn(N) * std[0]52particles[:, 2] %= 2 * np.pi5354d = u[1]*dt + randn(N) * std[1]55particles[:, 0] += np.cos(particles[:, 2]) * d56particles[:, 1] += np.sin(particles[:, 2]) * d575859def update(particles, weights, z, R, landmarks):60weights.fill(1.)61for i, landmark in enumerate(landmarks):62distance = np.linalg.norm(particles[:, 0:2] - landmark, axis=1)63weights *= scipy.stats.norm(distance, R).pdf(z[i])6465weights += 1.e-30066weights /= sum(weights) # normalize676869def neff(weights):70return 1. / np.sum(np.square(weights))717273def resample(particles, weights):74N = len(particles)75cumulative_sum = np.cumsum(weights)76cumulative_sum[-1] = 1. 
# avoid round-off error77indexes = np.searchsorted(cumulative_sum, random(N))7879# resample according to indexes80particles[:] = particles[indexes]81weights[:] = weights[indexes]82weights /= np.sum(weights) # normalize838485def resample_from_index(particles, weights, indexes):86particles[:] = particles[indexes]87weights[:] = weights[indexes]88weights /= np.sum(weights)899091def estimate(particles, weights):92""" returns mean and variance """93pos = particles[:, 0:2]94mu = np.average(pos, weights=weights, axis=0)95var = np.average((pos - mu)**2, weights=weights, axis=0)9697return mu, var9899100def mean(particles, weights):101""" returns weighted mean position"""102return np.average(particles[:, 0:2], weights=weights, axis=0)103104105106def residual_resample(w):107108N = len(w)109110w_ints = np.floor(N*w).astype(int)111residual = w - w_ints112residual /= sum(residual)113114indexes = np.zeros(N, 'i')115k = 0116for i in range(N):117for j in range(w_ints[i]):118indexes[k] = i119k += 1120cumsum = np.cumsum(residual)121cumsum[N-1] = 1.122for j in range(k, N):123indexes[j] = np.searchsorted(cumsum, random())124125return indexes126127128129def residual_resample2(w):130131N = len(w)132133w_ints =np.floor(N*w).astype(int)134135R = np.sum(w_ints)136m_rdn = N - R137138Ws = (N*w - w_ints)/ m_rdn139indexes = np.zeros(N, 'i')140i = 0141for j in range(N):142for k in range(w_ints[j]):143indexes[i] = j144i += 1145cumsum = np.cumsum(Ws)146cumsum[N-1] = 1 # just in case147148for j in range(i, N):149indexes[j] = np.searchsorted(cumsum, random())150151return indexes152153154155def systemic_resample(w):156N = len(w)157Q = np.cumsum(w)158indexes = np.zeros(N, 'int')159t = np.linspace(0, 1-1/N, N) + random()/N160161i, j = 0, 0162while i < N and j < N:163while Q[j] < t[i]:164j += 1165indexes[i] = j166i += 1167168return indexes169170171172173174def Gaussian(mu, sigma, x):175176# calculates the probability of x for 1-dim Gaussian with mean mu and var. 
sigma177g = (np.exp(-((mu - x) ** 2) / (sigma ** 2) / 2.0) /178np.sqrt(2.0 * np.pi * (sigma ** 2)))179for i in range(len(g)):180g[i] = max(g[i], 1.e-229)181return g182183184if __name__ == '__main__':185186DO_PLOT_PARTICLES = False187from numpy.random import seed188import matplotlib.pyplot as plt189190#plt.figure()191192seed(5)193for count in range(10):194print()195print(count)196197N = 4000198sensor_std_err = .1199landmarks = np.array([[-1, 2], [2,4], [10,6], [18,25]])200NL = len(landmarks)201202203particles = create_uniform_particles((0,20), (0,20), (0, 6.28), N)204weights = np.zeros(N)205206#if DO_PLOT_PARTICLES:207# plt.scatter(particles[:, 0], particles[:, 1], alpha=.2, color='g')208209xs = []210for x in range(18):211zs = []212pos=(x+1, x+1)213214for landmark in landmarks:215d = np.sqrt((landmark[0]-pos[0])**2 + (landmark[1]-pos[1])**2)216zs.append(d + randn()*sensor_std_err)217218219zs = np.linalg.norm(landmarks - pos, axis=1) + randn(NL)*sensor_std_err220221222# move diagonally forward to (x+1, x+1)223224predict(particles, (0.00, 1.414), (.2, .05))225226update(particles, weights, z=zs, R=sensor_std_err, landmarks=landmarks)227if x == 0:228print(max(weights))229#while abs(pf.neff() -N) < .1:230# print('neffing')231# pf.create_uniform_particles((0,20), (0,20), (0, 6.28))232# pf.update(z=zs)233#print(pf.neff())234#indexes = residual_resample2(pf.weights)235indexes = systemic_resample(weights)236237resample_from_index(particles, weights, indexes)238#pf.resample()239240mu, var = estimate(particles, weights)241xs.append(mu)242if DO_PLOT_PARTICLES:243plt.scatter(particles[:, 0], particles[:, 1], alpha=.2)244plt.scatter(pos[0], pos[1], marker='*', color='r')245plt.scatter(mu[0], mu[1], marker='s', color='r')246plt.pause(.01)247248xs = np.array(xs)249plt.plot(xs[:, 0], xs[:, 1])250plt.show()251252253