kd-tree找最邻近点 Python实现

kd-tree找最邻近点 Python实现

基本概念

kd-tree是KNN算法的一种实现。算法的基本思想是用多维空间中的实例点,将空间划分为多块,成二叉树形结构。划分超矩形上的实例点是树的非叶子节点,而每个超矩形内部的实例点是叶子节点。

超矩形划分方法

有数据集datalist,其中的数据是Xi,每个Xi由多个特征值组成。首先将所有数据的Xi[0]找出,取得Xi[0]的中位数center,在树的根节点中保存Xi[0] == center的实例点Xi,树根的左子树递归构造Xi[0] < center的数据集,树根的右子树构造Xi[0] > center的数据集。同时在第二层,划分特征变为Xi[1],划分特征随着树的深度改变,为 (d - 1) % k ,其中k是特征的维度,d是此时划分的树深度。

python实现

import numpy as np
import matplotlib.pyplot as plot
import math
# kdtree类


class KdTree(object):
    """A kd-tree node: points on one splitting hyperplane plus subtrees.

    Attributes:
        split: axis index this node splits on.
        l, r:  left / right child KdTree (or None).
        f:     parent node (None for the root).
        nodes: data points lying exactly on the splitting hyperplane.
    """

    def __init__(self):
        self.split = None  # splitting axis, assigned by createKdTree
        self.l = None      # left child: points below the pivot on this axis
        self.r = None      # right child: points above the pivot on this axis
        self.f = None      # parent link, used when backtracking upward
        self.nodes = []    # points whose split-coordinate equals the pivot

    def __str__(self):
        # Recursive textual dump of the subtree rooted here.
        return f'{{ nodes:{self.nodes}, left:{self.l}, right:{self.r}}}'


def createKdTree(split, datalist, k):
    """Recursively build a kd-tree over datalist, cycling the split axis.

    split: starting axis index (reduced modulo k).
    datalist: sequence of points, each indexable by axis; may carry a
        trailing class label that the split logic never touches.
    k: number of feature dimensions.
    Returns the root KdTree node, or None for an empty dataset.
    """
    if not datalist:
        return None
    axis = split % k
    # The median of the current axis defines the splitting hyperplane.
    pivot = np.sort([point[axis] for point in datalist])[len(datalist) // 2]
    node = KdTree()
    node.split = axis
    # Points sitting exactly on the hyperplane stay in this node.
    node.nodes = [point for point in datalist if point[axis] == pivot]
    node.l = createKdTree(axis + 1, [p for p in datalist if p[axis] < pivot], k)
    node.r = createKdTree(axis + 1, [p for p in datalist if p[axis] > pivot], k)
    # Wire children back to their parent for the upward search phase.
    if node.l is not None:
        node.l.f = node
    if node.r is not None:
        node.r.f = node
    return node

# 构建训练数据


def createData():
    """Generate three Gaussian clusters of 20 labelled 2-D points each.

    Returns (datalist, k) where datalist is a list of (x1, x2, label)
    tuples and k is the feature dimension (2).
    """
    k = 2
    # Cluster 1: centred near (2, 3), label 1.
    x1_a = np.random.randn(20) + 2
    x2_a = np.random.randn(20) + 3
    y_a = np.full(20, 1)
    # Cluster 2: centred near (2, 10), wider spread on x2, label 2.
    x1_b = np.random.randn(20) + 2
    x2_b = 2 * np.random.randn(20) + 10
    y_b = np.full(20, 2)
    # Cluster 3: centred near (8, 10), wider spread on x2, label 3.
    x1_c = np.random.randn(20) + 8
    x2_c = 2 * np.random.randn(20) + 10
    y_c = np.full(20, 3)
    # Stack the clusters feature-wise and zip into (x1, x2, label) rows.
    xs1 = np.concatenate((x1_a, x1_b, x1_c))
    xs2 = np.concatenate((x2_a, x2_b, x2_c))
    ys = np.concatenate((y_a, y_b, y_c))
    return (list(zip(xs1, xs2, ys)), k)


# 预测,返回测试集X中每个实例属于哪一类
def predict(head, X):
    """Classify every sample in X against kd-tree head.

    Returns a list with one predicted class label per sample.
    """
    return [guessX(head, sample) for sample in X]


def guessX(node, x):
    """Descend the kd-tree toward x's region, then run the nearest search.

    Walks down by comparing x against the node's splitting coordinate
    until the matching child is missing (or the coordinate ties), then
    hands the reached node to neast() for the backtracking phase.
    """
    axis = node.split
    pivot = node.nodes[0][axis]
    if x[axis] < pivot and node.l is not None:
        return guessX(node.l, x)
    if x[axis] > pivot and node.r is not None:
        return guessX(node.r, x)
    return neast(node, x)


def getDis(X1, X2):
    """Squared Euclidean distance over the first len(X2) coordinates.

    X1 may be longer than X2 (training rows carry a trailing class
    label); only the feature dimensions of X2 are compared. Returns the
    SQUARED distance — callers only compare these values against each
    other, so the sqrt is deliberately skipped.
    """
    # Generator expression + builtin sum replaces the manual index loop,
    # which also shadowed the builtin name `sum`.
    return sum((X1[i] - X2[i]) ** 2 for i in range(len(X2)))


def get2Min(nodes, x, minDis, minX):
    """Scan nodes for a point closer to x than the current best.

    minDis / minX are the incumbent best squared distance and point.
    Returns the (possibly updated) pair; ties keep the earlier value.
    """
    best_d, best_p = minDis, minX
    for candidate in nodes:
        d = getDis(candidate, x)
        if d < best_d:
            best_d, best_p = d, candidate
    return (best_d, best_p)


def neast(node, x):
    """Find x's nearest neighbour starting from node; return its label.

    Seeds the best match from node's own points, then backtracks up the
    tree via findY to examine hyperplanes that may hide a closer point.
    """
    # float('inf') replaces the original magic sentinel 10000: with the
    # cap, any dataset whose nearest squared distance exceeded 10000
    # left minX as 0 and crashed on minX[-1]. (Also dropped the unused
    # `dis = []` local.)
    minDis, minX = get2Min(node.nodes, x, float('inf'), None)
    if node.f is None:
        return minX[-1]  # single-node tree: label is the last tuple field
    # Tell the parent which side we arrived from (0 = left, 1 = right).
    return findY(node.f, x, minDis, minX, 0 if node.f.l == node else 1)


def findY(node, x, minDis, minX, dire):
    """Backtrack one level: update the best match using node and its
    unvisited subtree, then continue toward the root.

    node: parent being examined; minDis: best SQUARED distance so far;
    minX: best point so far; dire: 0 if we arrived from node's left
    child, 1 if from its right. Returns the nearest point's label.
    """
    # minDis is a squared distance (getDis returns squared sums), so the
    # gap to the splitting hyperplane must be squared before comparing.
    # The original compared fabs(gap) > minDis, mixing units and pruning
    # far too eagerly for minDis < 1 and too lazily above it.
    # NOTE(review): returning here also stops the ascent entirely, as in
    # the original; a textbook search would keep climbing and only skip
    # this node's sibling branch — confirm whether the approximation is
    # intended before tightening further.
    if (node.nodes[0][node.split] - x[node.split]) ** 2 > minDis:
        return minX[-1]
    minDis, minX = get2Min(node.nodes, x, minDis, minX)
    # Exhaustively search the sibling branch we have not visited yet.
    minDis, minX = reachAll(node.r if dire == 0 else node.l, x, minDis, minX)
    if node.f is None:
        return minX[-1]  # reached the root: done
    return findY(node.f, x, minDis, minX, 0 if node.f.l == node else 1)


def reachAll(node, x, minDis, minX):
    """Exhaustively scan the subtree rooted at node for a closer point.

    Returns the updated (best squared distance, best point) pair; a
    missing subtree (None) leaves the incumbent pair untouched.
    """
    if node is None:
        return (minDis, minX)
    # Current node's points first, then both children, depth-first.
    minDis, minX = get2Min(node.nodes, x, minDis, minX)
    for child in (node.l, node.r):
        minDis, minX = reachAll(child, x, minDis, minX)
    return (minDis, minX)


if __name__ == '__main__':
    # Build the training set (features + class label) and its kd-tree.
    datalist, k = createData()
    head = createKdTree(0, datalist, k)

    # Five random test samples drawn near cluster 2's centre.
    X_1test = np.random.randn(5) + 2
    X_2test = 2 * np.random.randn(5) + 10
    X = list(zip(X_1test, X_2test))

    # Print the predicted class labels, one per test sample.
    print(predict(head, X))

猜你喜欢

转载自blog.csdn.net/qq_35170267/article/details/83046180