- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo.

Loads a two-feature dataset with binary labels, visualizes the two
classes, trains an RBF-kernel SVM on a 75/25 train/test split, and
prints classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

import utilities

# Load input data (feature rows in X, 0/1 class labels in y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")