优化SMO算法

与简单SMO方法的区别:在寻找 aj 时会选择使步长(|Ei - Ej|)最大的点,从而使 alpha 的变化最大,提升收敛效率

import random

import matplotlib.pyplot as plt
import numpy as np

class optStruct:
    """Container bundling the state shared by the SMO routines.

    Passing a single object around avoids long argument lists between
    the helper functions.
    """
    def __init__(self, dataMatIn, classLabels, C, toler):
        self.X = dataMatIn                              # sample matrix, one row per sample
        self.labelMat = classLabels                     # class labels (+1/-1) as a column vector
        self.C = C                                      # soft-margin penalty parameter
        self.tol = toler                                # KKT-violation tolerance
        self.m = np.shape(dataMatIn)[0]                 # number of samples
        self.b = 0                                      # intercept of the separating hyperplane
        self.alphas = np.mat(np.zeros((self.m, 1)))     # Lagrange multipliers, all start at 0
        self.eCache = np.mat(np.zeros((self.m, 2)))     # error cache: [valid flag, cached E_k]

def loadDataSet(fileName):
    """Load a tab-separated data file.

    Each line holds two feature values followed by the class label.

    Args:
        fileName: path to the data file.

    Returns:
        (dataMat, labelMat): list of [x1, x2] feature pairs and the
        matching list of labels, all parsed as floats.
    """
    dataMat = []
    labelMat = []
    # BUG FIX: the original opened the file without ever closing it;
    # 'with' guarantees the handle is released even if parsing raises.
    with open(fileName) as fr:
        for line in fr:
            lineArr = line.strip().split('\t')
            dataMat.append([float(lineArr[0]), float(lineArr[1])])
            labelMat.append(float(lineArr[-1]))
    return dataMat, labelMat

def calcEk(oS, k):
    """Prediction error E_k = f(x_k) - y_k for sample k (linear kernel)."""
    # f(x_k) = sum_i alpha_i * y_i * <x_i, x_k> + b
    kernelCol = oS.X * oS.X[k, :].T
    fXk = float(np.multiply(oS.alphas, oS.labelMat).T * kernelCol + oS.b)
    return fXk - float(oS.labelMat[k])

def selectJrand(i, m):
    """Return a random index in [0, m) that differs from i."""
    j = int(random.uniform(0, m))
    while j == i:
        j = int(random.uniform(0, m))
    return j

def selectJ(i, oS, Ei):
    """Choose the second alpha j that maximizes the step |Ei - Ej|.

    Falls back to a random partner when the error cache holds no other
    valid entry yet (early iterations).
    """
    oS.eCache[i] = [1, Ei]                          # mark Ei as valid in the cache
    validEcacheList = np.nonzero(oS.eCache[:, 0].A)[0]
    if len(validEcacheList) <= 1:
        # Not enough cached errors to compare: pick j at random.
        j = selectJrand(i, oS.m)
        return j, calcEk(oS, j)
    maxK, maxDeltaE, Ej = -1, 0, 0
    for k in validEcacheList:
        if k == i:
            continue
        Ek = calcEk(oS, k)
        deltaE = abs(Ei - Ek)
        if deltaE > maxDeltaE:
            maxK, maxDeltaE, Ej = k, deltaE, Ek
    return maxK, Ej

def updateEk(oS, k):
    """Recompute sample k's error and store it in the cache as valid."""
    oS.eCache[k] = [1, calcEk(oS, k)]

def clipAlpha(aj, H, L):
    """Clamp the candidate alpha to the box: cap at H first, then floor at L."""
    clipped = H if aj > H else aj
    if clipped < L:
        clipped = L
    return clipped

def innerL(i, oS):
    """Attempt one SMO optimization step using sample i as the first alpha.

    Picks the partner alpha j via selectJ, updates the pair and the
    intercept b in place on oS.

    Args:
        i: index of the first alpha.
        oS: shared optStruct state (mutated in place).

    Returns:
        1 if the alpha pair changed, otherwise 0.
    """
    Ei = calcEk(oS, i)
    # Proceed only if alpha_i violates the KKT conditions beyond the tolerance.
    if ((oS.labelMat[i] * Ei < -oS.tol) and (oS.alphas[i] < oS.C)) or \
       ((oS.labelMat[i] * Ei > oS.tol) and (oS.alphas[i] > 0)):
        j, Ej = selectJ(i, oS, Ei)
        alphaIold = oS.alphas[i].copy()
        alphaJold = oS.alphas[j].copy()
        # Box bounds keeping alpha_j in [0, C] while the pair constraint holds.
        if oS.labelMat[i] != oS.labelMat[j]:
            L = max(0, oS.alphas[j] - oS.alphas[i])
            H = min(oS.C, oS.C + oS.alphas[j] - oS.alphas[i])
        else:
            L = max(0, oS.alphas[j] + oS.alphas[i] - oS.C)
            H = min(oS.C, oS.alphas[j] + oS.alphas[i])
        if L == H:
            print("L==H")
            return 0
        # eta = 2*K12 - K11 - K22; must be negative for the update to be valid.
        eta = 2.0*oS.X[i,:]*oS.X[j,:].T - oS.X[i,:]*oS.X[i,:].T - oS.X[j,:]*oS.X[j,:].T
        if eta >= 0:
            print("eta>=0")
            # BUG FIX: the original fell through and divided by a
            # non-negative eta (wrong step direction / ZeroDivisionError).
            return 0
        oS.alphas[j] -= oS.labelMat[j] * (Ei - Ej) / eta
        oS.alphas[j] = clipAlpha(oS.alphas[j], H, L)
        updateEk(oS, j)
        if abs(oS.alphas[j] - alphaJold) < 0.00001:
            print("AlphaJ变化太小")
            return 0
        # Move alpha_i by the same amount in the opposite label direction.
        oS.alphas[i] += oS.labelMat[j]*oS.labelMat[i]*(alphaJold - oS.alphas[j])
        updateEk(oS, i)
        b1 = oS.b - Ei - oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.X[i,:]*oS.X[i,:].T - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.X[i,:]*oS.X[j,:].T
        b2 = oS.b - Ej - oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.X[i,:]*oS.X[j,:].T - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.X[j,:]*oS.X[j,:].T
        if (0 < oS.alphas[i]) and (oS.C > oS.alphas[i]):
            oS.b = b1
        # BUG FIX: original read bare 'alphas[j]' (NameError) instead of oS.alphas[j].
        elif (0 < oS.alphas[j]) and (oS.C > oS.alphas[j]):
            oS.b = b2
        else:
            oS.b = (b1 + b2) / 2
        return 1
    return 0

def smoP(dataMatIn, classLabels, C, toler, maxIter):
    """Platt SMO outer loop: alternate full sweeps with non-bound sweeps.

    Args:
        dataMatIn: sample features, one row per sample.
        classLabels: class labels (+1/-1), one per sample.
        C: soft-margin penalty parameter.
        toler: KKT-violation tolerance.
        maxIter: maximum number of outer iterations.

    Returns:
        (b, alphas): the learned intercept and Lagrange multipliers.
    """
    oS = optStruct(np.mat(dataMatIn), np.mat(classLabels).transpose(), C, toler)
    iteration = 0
    entireSet = True            # True: sweep all samples; False: only non-bound ones
    alphaPairsChanged = 0
    while (iteration < maxIter) and ((alphaPairsChanged > 0) or entireSet):
        alphaPairsChanged = 0   # BUG FIX: this line was mis-indented (IndentationError)
        if entireSet:
            for i in range(oS.m):
                alphaPairsChanged += innerL(i, oS)
                print("全部遍历:第%d次迭代 样本:%d,alpha优化次数:%d"%(iteration,i,alphaPairsChanged))
            iteration += 1
        else:
            # Non-bound samples: 0 < alpha < C.
            # BUG FIX: the original put a comment after a '\' line
            # continuation (SyntaxError); a single expression avoids it.
            nonBoundIs = np.nonzero((oS.alphas.A > 0) * (oS.alphas.A < C))[0]
            for i in nonBoundIs:
                alphaPairsChanged += innerL(i, oS)
                print("非边界遍历:第%d次迭代 样本:%d,alpha优化次数:%d"%(iteration,i,alphaPairsChanged))
            iteration += 1
        if entireSet:
            entireSet = False               # after a full sweep, switch to non-bound points
        elif alphaPairsChanged == 0:
            entireSet = True                # non-bound sweep made no progress: full sweep again
        print("迭代次数:%d"%iteration)
    return oS.b, oS.alphas

def showClassifer(dataMat, classLabels, w, b, alphas):
    """Scatter both classes, draw the separating line, and circle the SVs."""
    # Split the samples by class label.
    positive, negative = [], []
    for idx in range(len(dataMat)):
        bucket = positive if classLabels[idx] > 0 else negative
        bucket.append(dataMat[idx])
    posArr = np.array(positive)
    negArr = np.array(negative)
    plt.scatter(np.transpose(posArr)[0], np.transpose(posArr)[1], s=30, alpha=0.7)
    plt.scatter(np.transpose(negArr)[0], np.transpose(negArr)[1], s=30, alpha=0.7)
    # Line endpoints at the extreme x1 values of the data set.
    x1 = max(dataMat)[0]
    x2 = min(dataMat)[0]
    a1, a2 = w
    b = float(b)
    a1 = float(a1[0])
    a2 = float(a2[0])
    # Solve a1*x + a2*y + b = 0 for y at both endpoints.
    y1 = (-b - a1 * x1) / a2
    y2 = (-b - a1 * x2) / a2
    plt.plot([x1, x2], [y1, y2])
    # Circle every support vector (alpha > 0).
    for idx, alphaVal in enumerate(alphas):
        if abs(alphaVal) > 0:
            x, y = dataMat[idx]
            plt.scatter([x], [y], s=150, c='none', alpha=0.7, linewidth=1.5, edgecolor='red')
    plt.show()

def calcWs(alphas, dataArr, classLabels):
    """Recover the weight vector w = sum_i alpha_i * y_i * x_i.

    Only support vectors (alpha_i > 0) actually contribute to the sum.
    """
    X = np.mat(dataArr)
    labelMat = np.mat(classLabels).transpose()
    m, n = np.shape(X)
    w = np.zeros((n, 1))
    for idx in range(m):
        w += np.multiply(alphas[idx] * labelMat[idx], X[idx, :].T)
    return w

转载自 blog.csdn.net/JachinMa/article/details/88919987