Custom SVM kernel functions in sklearn (defined externally or internally)

We can use a kernel function that we write ourselves:

*Note: if you use precomputed mode, that is, you pass an already computed kernel matrix instead of a function, the kernel must be computed against the training data: fit() expects the (n_train, n_train) Gram matrix of the training samples, and predict() expects the (n_test, n_train) kernel between the test samples and the training samples.
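A minimal sketch of the shapes involved (illustrative variable names, not from the original post):

import numpy as np
rng = np.random.default_rng(0)
X_train, X_test = rng.random((4, 2)), rng.random((3, 2))
K_train = np.dot(X_train, X_train.T)  # shape (4, 4): the Gram matrix passed to fit()
K_test = np.dot(X_test, X_train.T)    # shape (3, 4): the matrix passed to predict()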

# coding=utf-8
import numpy as np
from sklearn import svm, datasets
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
from sklearn.metrics import zero_one_loss


if __name__ == "__main__":
    # define a toy dataset
    X_train = np.array([[0.3, 0.4], [0, 0], [1, 1], [1.1, 1.1]])
    y_train = [0, 0, 1, 1]
    X_test = np.array([[0.2, 0.2], [0, 3], [1, -1], [5, 5]])
    y_test = [0, 1, 0, 1]

    # Test 1: pass the kernel as a Python callable
    def my_kernel(X, Y):  # custom kernel: a plain dot product, i.e. a linear kernel
        return np.dot(X, Y.T)
    clf = svm.SVC(kernel=my_kernel)
    clf.fit(X_train, y_train)
    result = clf.predict(X_test)
    print(result)
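    # Sanity check (not in the original post): my_kernel is just a dot
    # product, so the built-in linear kernel should predict the same labels.
    clf_linear = svm.SVC(kernel='linear')
    clf_linear.fit(X_train, y_train)
    print(clf_linear.predict(X_test))  # expected to match the result above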

    # Test 2: compute the kernel externally (kernel='precomputed')
    clf = svm.SVC(kernel='precomputed')
    gram = np.dot(X_train, X_train.T)  # linear kernel, computed externally beforehand
    clf.fit(gram, y_train)
    # Predict on the test set: in precomputed mode, the matrix passed to
    # predict() is the kernel between the test samples and the training samples.
    gram_test = np.dot(X_test, X_train.T)
    result = clf.predict(gram_test)
    print(result)
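    # Cross-check (not in the original post): the precomputed Gram matrix and
    # the callable from Test 1 encode the same linear kernel, so the
    # predictions should agree.
    clf_fn = svm.SVC(kernel=my_kernel).fit(X_train, y_train)
    print(np.array_equal(result, clf_fn.predict(X_test)))  # expected: True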
    # import some data to play with
    iris = datasets.load_iris()
    X_train = iris.data[:, :2]  # we only take the first two features. We could
    #                       avoid this ugly slicing by using a two-dim dataset
    Y_train = iris.target


    def my_kernel(X, Y):
        """
        We create a custom kernel:
                     (2  0)
        k(X, Y) = X  (    ) Y.T
                     (0  1)
        """
        M = np.array([[2, 0], [0, 1.0]])
        return np.dot(np.dot(X, M), Y.T)
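
    # Note (added): with M = diag(2, 1) this kernel is just a linear kernel
    # after scaling the first feature by sqrt(2), since
    # X M Y.T = (X S)(Y S).T with S = diag(sqrt(2), 1).
    S = np.diag([np.sqrt(2.0), 1.0])
    print(np.allclose(my_kernel(X_train, X_train),
                      np.dot(np.dot(X_train, S), np.dot(X_train, S).T)))  # True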


    h = .02  # step size in the mesh

    # we create an instance of SVM and fit our data.
    clf = svm.SVC(kernel=my_kernel)
    clf.fit(X_train, Y_train)

    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
    y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)
    # Plot also the training points
    plt.scatter(X_train[:, 0], X_train[:, 1], c=Y_train, cmap=plt.cm.Paired, edgecolors='k')
    plt.title('3-Class classification using Support Vector Machine with custom'
              ' kernel')
    plt.axis('tight')
    plt.show()

    # ------ The right way: precomputed kernel on a real dataset --------------

    digits = datasets.load_digits()
    X, y = shuffle(digits.data, digits.target)
    X_train, X_test = X[:1000, :], X[1000:, :]
    y_train, y_test = y[:1000], y[1000:]

    svc = svm.SVC(kernel='precomputed')

    kernel_train = np.dot(X_train, X_train.T)  # linear kernel

    svc.fit(kernel_train, y_train)

    # predict() expects the full (n_test, n_train) kernel matrix; internally
    # only the columns corresponding to the support vectors (svc.support_) are used.
    kernel_test = np.dot(X_test, X_train.T)
    y_pred = svc.predict(kernel_test)
    print(zero_one_loss(y_test, y_pred))  # fraction of misclassified test samples
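    # Equivalent check (added): score() also accepts the precomputed test
    # kernel, and accuracy is 1 - zero_one_loss.
    print(svc.score(kernel_test, y_test))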

Reposted from my.oschina.net/u/2996334/blog/1819734