import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed; train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
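The listing depends on a helper module called utilities, which is not shown in this post; it ships with the book's repository. Below is a minimal sketch of what its load_data and plot_classifier helpers could look like, assuming a comma-separated data file whose last column is the class label. The actual utilities.py may differ.

# NOTE: assumed sketch of the helpers imported from utilities.py; not the book's exact code
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load a comma-separated file whose last column is the class label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted 2-D classifier together with the points."""
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Colour the regions, then overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()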
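The commented-out params lines in the listing hint at comparing the linear, polynomial, and RBF kernels. A quick sketch for doing that comparison explicitly, reusing the X_train/X_test split from the listing above and reporting plain test accuracy (accuracy_score is an assumption here; the listing itself only prints classification_report):

from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

# Fit each kernel configuration in turn and report its test accuracy
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy: {:.3f}'.format(acc))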