A k-d tree organizes points so that nearest-neighbor queries in a k-dimensional space can be answered efficiently.
import collections
import itertools
import math
def square_distance(a, b):
    """Return the squared Euclidean distance between points *a* and *b*.

    *a* and *b* are equal-length sequences of numbers; if their lengths
    differ, the extra coordinates of the longer one are ignored (zip
    semantics).
    """
    # BUG FIX: itertools.izip was removed in Python 3; the built-in zip
    # is already lazy there and behaves the same for this purpose.
    s = 0
    for x, y in zip(a, b):
        d = x - y
        s += d * d
    return s
# One k-d tree node: the stored point, the axis it splits on, the payload
# label, and the left/right subtrees (None for a missing child).
Node = collections.namedtuple("Node", ["point", "axis", "label", "left", "right"])
class KDTree(object):
    """A tree for nearest-neighbor search in a k-dimensional space.

    For information about the implementation, see
    http://en.wikipedia.org/wiki/Kd-tree

    Usage:
        objects is an iterable of (point, label) tuples,
        k is the number of dimensions.

        t = KDTree(k, objects)
        point, label, distance = t.nearest_neighbor(destination)
    """

    # FIX: the default was a shared mutable list (objects=[]); an empty
    # tuple is equivalent for callers and cannot be accidentally shared.
    def __init__(self, k, objects=()):
        """Build a k-d tree over *objects*, an iterable of (point, label)."""

        def build_tree(objects, axis=0):
            # Sort along the current axis, put the median at this node,
            # and recurse on the two halves with the next axis.
            if not objects:
                return None
            objects.sort(key=lambda o: o[0][axis])
            median_idx = len(objects) // 2
            median_point, median_label = objects[median_idx]
            next_axis = (axis + 1) % k
            return Node(median_point, axis, median_label,
                        build_tree(objects[:median_idx], next_axis),
                        build_tree(objects[median_idx + 1:], next_axis))

        # list() copies the input, so build_tree's in-place sort never
        # mutates the caller's data.
        self.root = build_tree(list(objects))

    def nearest_neighbor(self, destination):
        """Return (point, label, distance) for the stored point closest
        to *destination*, where distance is Euclidean."""
        # Search state: best point found, its label, lowest squared distance.
        best = [None, None, float('inf')]

        def recursive_search(here):
            if here is None:
                return
            point, axis, label, left, right = here
            here_sd = square_distance(point, destination)
            if here_sd < best[2]:
                best[:] = point, label, here_sd
            diff = destination[axis] - point[axis]
            close, away = (left, right) if diff <= 0 else (right, left)
            recursive_search(close)
            # Only descend the far side if the splitting plane is closer
            # than the best match found so far.
            if diff ** 2 < best[2]:
                recursive_search(away)

        recursive_search(self.root)
        return best[0], best[1], math.sqrt(best[2])
if __name__ == '__main__':
    # Self-test: for random destinations, the tree's answer must match a
    # brute-force scan over all points (up to floating-point tolerance).
    # FIX: xrange was removed in Python 3; range is the lazy equivalent.
    from random import random

    k = 5
    npoints = 1000
    lookups = 1000
    eps = 1e-8

    points = [(tuple(random() for _ in range(k)), i)
              for i in range(npoints)]
    tree = KDTree(k, points)
    for _ in range(lookups):
        destination = [random() for _ in range(k)]
        _, _, mindistance = tree.nearest_neighbor(destination)
        minsq = min(square_distance(p, destination) for p, _ in points)
        assert abs(math.sqrt(minsq) - mindistance) < eps
For an explanation of how a kd-tree works, see the Wikipedia page.
Implementing and testing insertion/removal of single nodes, and a k-nearest-neighbors search (hint: turn best into a list of the k closest elements found so far), should be fairly easy and is left as an exercise for the commenter :-)