- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + one label column per row;
# format assumed from utilities.load_data — TODO confirm against that helper).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn import model_selection
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = model_selection.train_test_split(
    X, y, test_size=0.25, random_state=5)

# Kernel choice: uncomment one of the alternatives to compare decision boundaries.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# sorted() keeps the class names aligned with the numeric label order;
# a bare set() has no guaranteed iteration order.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures when running as a script (no-op in some interactive backends).
plt.show()