- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: whitespace-delimited rows of [x1, x2, label]
# (utilities.load_data is a project-local helper; returns features X and labels y)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection is the modern, equivalent home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")