- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an RBF-kernel SVM on 2-D multivariate data.

Loads labelled points from a text file via the project-local `utilities`
module, plots the two classes, fits an SVC, visualises the decision
boundary on the train/test splits, and prints classification reports.
"""

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}

classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label (labels are assumed numeric here)
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")