Using Python 3 to learn the support vector machine API in scikit-learn.
The full source code can be downloaded from my GitHub repository: https://github.com/linyi0604/kaggle
# Import the handwritten-digits dataset loader
from sklearn.datasets import load_digits
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.svm import LinearSVC
from sklearn.metrics import classification_report

'''
Support vector machine:
Searches for the best linear classifier among all possible ones,
according to the distribution of the training samples.
It keeps only the few most informative training samples (the support
vectors) from the high-dimensional data, which saves memory and improves
prediction performance, at the cost of more CPU time during training.
'''

'''
1. Load the data
'''
# Load the handwritten-digit image data into the `digits` variable
digits = load_digits()
# Inspect the feature dimensionality and dataset size
# print(digits.data.shape)  # (1797, 64)

'''
2. Split into training and test sets
'''
x_train, x_test, y_train, y_test = train_test_split(digits.data,
                                                    digits.target,
                                                    test_size=0.25,
                                                    random_state=33)

'''
3. Recognize the digit images with a linear support vector machine
'''
# Standardize the data: fit the scaler on the TRAINING data only, then
# apply the same transformation to the test data.  (The original code
# called fit_transform on the test set as well, which re-fits the scaler
# on test statistics — a data-leakage bug that scales the two sets
# inconsistently.)
ss = StandardScaler()
x_train = ss.fit_transform(x_train)
x_test = ss.transform(x_test)

# Initialize the linear support vector classifier
lsvc = LinearSVC()
# Train on the standardized training data
lsvc.fit(x_train, y_train)
# Predict the test set; the results are stored in y_predict
y_predict = lsvc.predict(x_test)

'''
4. Evaluate the support vector machine classifier
'''
print ( " Accuracy: " , lsvc.score(x_test, y_test))
print ( " Other evaluation data:\n " , classification_report(y_test, y_predict, target_names= digits.target_names.astype(str)))

'''
Sample output from the original run (exact numbers may differ slightly
after the test-set scaling fix above):

Accuracy: 0.9488888888888889
Other evaluation data:
             precision    recall  f1-score   support

          0       0.92      0.97      0.94        35
          1       0.95      0.98      0.96        54
          2       0.98      1.00      0.99        44
          3       0.93      0.93      0.93        46
          4       0.97      1.00      0.99        35
          5       0.94      0.94      0.94        48
          6       0.96      0.98      0.97        51
          7       0.90      1.00      0.95        35
          8       0.98      0.83      0.90        58
          9       0.95      0.91      0.93        44

avg / total       0.95      0.95      0.95       450
'''