# 【收藏】支持向量机原理详解+案例+代码！【点击阅读原文下载】

+关注继续查看

SVM 是如何工作的？

●  想象一个我们需要的新空间：
●  z = x² + y²
●  找到新空间中点积的形式：
●  a · b = x_a·x_b + y_a·y_b + z_a·z_b
●  a · b = x_a·x_b + y_a·y_b + (x_a² + y_a²)·(x_b² + y_b²)
●  让 SVM 处理新的点积结果——这就是核函数

Python实现

iris.txt 原始数据集

iris_train.txt 训练数据集

iris_test.txt 测试数据集

SVM.py 未采用pca降维的SVM分类器

SVM_PCA.py 采用pca降维的SVM分类器

SVM.py代码如下：

 1#!/usr/bin/python 2#-*- coding: utf-8 -*- 3from numpy import * 4import matplotlib.pyplot as plt 5import matplotlib.animation as ai 6import numpy as np 7import time 8 9def loadData(): #加载函数 10 dataMat = [] 11 labelMat1 = [] 12 labelMat2 = [] 13 labelMat3 = [] 14 ylabel = [] 15 fr = open('iris_train.txt') 16 for line in fr.readlines(): 17 lineArr = line.strip().split(',') 18 dataMat.append([float(lineArr[0]), float(lineArr[1]), float(lineArr[2]), float(lineArr[3])]) 19 if(lineArr[4]=='Iris-setosa'): 20 labelMat1.append(float(1)) 21 else: 22 labelMat1.append(float(-1)) 23 if(lineArr[4]=='Iris-versicolor'): 24 labelMat2.append(float(1)) 25 else: 26 labelMat2.append(float(-1)) 27 if(lineArr[4]=='Iris-virginica'): 28 labelMat3.append(float(1)) 29 else: 30 labelMat3.append(float(-1)) 31 ylabel.append(lineArr[4]) 32 return dataMat,labelMat1,labelMat2,labelMat3,ylabel 33 34def loadData_test(): 35 dataMat = [] 36 ylabel = [] 37 fr = open('iris_test.txt') 38 for line in fr.readlines(): 39 lineArr = line.strip().split(',') 40 dataMat.append([float(lineArr[0]), float(lineArr[1]), float(lineArr[2]), float(lineArr[3])]) 41 ylabel.append(lineArr[4]) 42 return dataMat,ylabel 43 44 45def pca(dataMat, topNfeat):  46 meanVals = mean(dataMat, axis = 0) #求平均值  47 meanRemoved = dataMat - meanVals #去平均值  48 covMat = cov(meanRemoved,rowvar=0) #计算协防差矩阵  49 eigVals, eigVects = linalg.eig(mat(covMat))  50 eigValInd = argsort(eigVals)  51 #从小到大对N个值排序  52 eigValInd = eigValInd[: -(topNfeat + 1) : -1]  53 redEigVects = eigVects[:, eigValInd] 54 #将数据转换到新空间  55 lowDDataMat = meanRemoved * redEigVects  56 #reconMat = (lowDDataMat * redEigVects.T) + meanVals  57 return lowDDataMat 58 59def selectJrand(i,m): 60 j=i #排除i 61 while (j==i): 62 j = int(random.uniform(0,m)) 63 return j 64 65def clipAlpha(aj,H,L): 66 if aj > H: 67 aj = H 68 if L > aj: 69 aj = L 70 return aj 71 72def smoSimple(dataMatrix, classLabels, C, toler, maxIter): 73 labelMat = mat(classLabels).T 74 b = -1; m,n = shape(dataMatrix)  
75 alphas = mat(zeros((m,1))) 76 iter = 0 77 while (iter < maxIter): 78 alphaPairsChanged = 0 #alpha是否已经进行了优化 79 for i in range(m): 80 # w = alpha * y * x; f(x_i) = w^T * x_i + b 81 # 预测的类别 82 fXi = float(multiply(alphas,labelMat).T*dataMatrix*dataMatrix[i,:].T) + b  83 Ei = fXi - float(labelMat[i]) #得到误差，如果误差太大，检查是否可能被优化 84 #必须满足约束 85 if ((labelMat[i]*Ei < -toler) and (alphas[i] < C)) or ((labelMat[i]*Ei > toler) and (alphas[i] > 0)):  86 j = selectJrand(i,m) 87 fXj = float(multiply(alphas,labelMat).T*(dataMatrix*dataMatrix[j,:].T)) + b 88 Ej = fXj - float(labelMat[j]) 89 alphaIold = alphas[i].copy(); alphaJold = alphas[j].copy()  90 if (labelMat[i] != labelMat[j]):  91 L = max(0, alphas[j] - alphas[i]) 92 H = min(C, C + alphas[j] - alphas[i]) 93 else: 94 L = max(0, alphas[j] + alphas[i] - C) 95 H = min(C, alphas[j] + alphas[i]) 96 if L==H: #print "L==H";  97 continue 98 # Eta = -(2 * K12 - K11 - K22)，且Eta非负，此处eta = -Eta则非正 99 eta = 2.0 * dataMatrix[i,:]*dataMatrix[j,:].T - dataMatrix[i,:]*dataMatrix[i,:].T - dataMatrix[j,:]*dataMatrix[j,:].T100 if eta >= 0: #print "eta>=0"; 101 continue102 alphas[j] -= labelMat[j]*(Ei - Ej)/eta103 alphas[j] = clipAlpha(alphas[j],H,L)104 #如果内层循环通过以上方法选择的α_2不能使目标函数有足够的下降，那么放弃α_1105 if (abs(alphas[j] - alphaJold) < 0.00001): #print "j not moving enough"; 106 continue107 alphas[i] += labelMat[j]*labelMat[i]*(alphaJold - alphas[j])108 b1 = b - Ei- labelMat[i]*(alphas[i]-alphaIold)*dataMatrix[i,:]*dataMatrix[i,:].T - labelMat[j]*(alphas[j]-alphaJold)*dataMatrix[i,:]*dataMatrix[j,:].T109 b2 = b - Ej- labelMat[i]*(alphas[i]-alphaIold)*dataMatrix[i,:]*dataMatrix[j,:].T - labelMat[j]*(alphas[j]-alphaJold)*dataMatrix[j,:]*dataMatrix[j,:].T110 if (0 < alphas[i]) and (C > alphas[i]): b = b1111 elif (0 < alphas[j]) and (C > alphas[j]): b = b2112 else: b = (b1 + b2)/2.0113 alphaPairsChanged += 1114 if (alphaPairsChanged == 0): iter += 1115 else: iter = 0116 return b,alphas117118def calcWs(alphas,dataMatrix, labelMat):119 m,n = shape(dataMatrix) 
120 w = zeros((n,1))121 for i in range(m):122 w += multiply(alphas[i]*labelMat[i],dataMatrix[i,:].T)123 return w124125def pred(dataMat, labelMat, w1, b1,w3,b3):126 dataMat = mat(dataMat)127 sum1 =0 128 m,n = shape(dataMat)129 for i in range(m):130 if(dataMat[i]*w1 + b1 > 0.0 and labelMat[i]=='Iris-setosa'):131 sum1 +=1132 elif(dataMat[i]*w3 + b3 > 0.0 and labelMat[i]=='Iris-virginica'):133 sum1 +=1134 elif(dataMat[i]*w3 + b3 < 0.0 and dataMat[i]*w1 + b1 < 0.0 and labelMat[i]=='Iris-versicolor'):135 sum1 +=1136 m = float(sum1)/float(m)*100137 print "正确率为： " , m138139140xdata,ydata1,ydata2,ydata3,ylabe = loadData()141xdata_test, ylabe_test = loadData_test()142xdata = mat(xdata)143xdata_test = mat(xdata_test)144b1 , alphas1 = smoSimple(xdata,ydata1,0.8,0.0001,40)145#b2 , alphas2 = smoSimple(X,ydata2,0.8,0.0001,40)146b3 , alphas3 = smoSimple(xdata,ydata3,0.8,0.0001,40)147w1 = calcWs(alphas1,xdata,ydata1)148#w2 = calcWs(alphas2,X,ydata2)149w3 = calcWs(alphas3,xdata,ydata3)150pred(xdata_test, ylabe_test, w1, b1, w3, b3)

20年架构师用一文带你彻底搞懂SpringBoot嵌入式Web容器原理
Spring Boot嵌入式Web容器原理 Spring Boot的目标是构建“非常容易创建、独立、产品级别的基于Spring的应用”。这些应用是“立即可运行的”。在这个过程中，完全没有代码生成，不需要配置任何特殊的XML配置，为了这个目标，Spring Boot在Spring 4.0框架之上提供了很多特性，帮助应用以“约定优于配置”“开箱即用”的方式来启动应用并运行上下文。
56 0
【错误记录】Android Studio 编译时 Kotlin 代码编译报错 ( 升级支持库时处理 @NonNull 参数 )
【错误记录】Android Studio 编译时 Kotlin 代码编译报错 ( 升级支持库时处理 @NonNull 参数 )
107 0
【错误记录】Android Studio 编译时 Kotlin 代码编译报错 ( 升级支持库时处理 @NonNull 参数 )
【错误记录】Android Studio 编译时 Kotlin 代码编译报错 ( 升级支持库时处理 @NonNull 参数 )
40 0
OpenKruise v0.5.0 版本发布，支持无损的流式分批发布策略

564 0

782 0
R 支持向量机①

1021 0