import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
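The script relies on the book's `utilities` module, which is not included in this post. As a stand-in, here is a minimal sketch of a compatible `load_data`, assuming `data_multivar.txt` stores one comma-separated sample per line with the class label in the last column:

# Hypothetical stand-in for utilities.load_data (assumes comma-separated
# rows with the label in the last column, as in the book's data files).
def load_data(input_file):
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])   # feature columns
            y.append(values[-1])    # class label
    return np.array(X), np.array(y)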
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])
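For reference, the same split can be written with NumPy boolean indexing, which is equivalent to the list comprehensions above:

# Equivalent split using boolean masks
class_0 = X[y == 0]
class_1 = X[y == 1]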
# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
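`utilities.plot_classifier` also comes from the book's repository rather than this post. A rough, hypothetical version of such a helper (names like `step_size` and `mesh_output` are illustrative, not necessarily the book's code) evaluates the classifier on a mesh grid and shades the decision regions behind the data points:

# Illustrative decision-boundary plot; a sketch, not the book's exact helper.
def plot_classifier(classifier, X, y, title):
    # Build a mesh grid covering the data range with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    step_size = 0.01
    x_values, y_values = np.meshgrid(np.arange(x_min, x_max, step_size),
                                     np.arange(y_min, y_max, step_size))
    # Predict a class for every grid point and reshape for plotting
    mesh_output = classifier.predict(np.c_[x_values.ravel(), y_values.ravel()])
    mesh_output = mesh_output.reshape(x_values.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(x_values, y_values, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(x_values.min(), x_values.max())
    plt.ylim(y_values.min(), y_values.max())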
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
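The commented-out `params` dictionaries above suggest the same script was rerun with linear, polynomial, and RBF kernels. If you want a more systematic comparison than eyeballing the plots, one option (not part of the original script) is cross-validated accuracy:

# Optional add-on: compare the three kernels with 5-fold cross-validation.
from sklearn.model_selection import cross_val_score

for kernel_params in [{'kernel': 'linear'},
                      {'kernel': 'poly', 'degree': 3},
                      {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**kernel_params), X, y, cv=5, scoring='accuracy')
    print(kernel_params, 'accuracy: %.2f%% (+/- %.2f%%)'
          % (100 * scores.mean(), 100 * scores.std()))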