import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the following to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the plots
plt.show()
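The script depends on utilities.py from the book's Chapter03 folder, which supplies load_data and plot_classifier. If you don't have that file, the minimal sketch below should be enough to run the script; it assumes data_multivar.txt is comma-separated with the class label in the last column, and the mesh step size and colour maps are my own choices, not necessarily the book's.

# utilities.py - minimal stand-in for the book's helper module (assumptions noted above)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load comma-separated rows; last column is the label, the rest are features."""
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    """Plot the decision regions of a fitted 2-D classifier together with the data points."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    step = 0.01  # mesh resolution (assumed)
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())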