- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads (X, y) from data_multivar.txt via the project-local `utilities`
module, visualizes the two classes, fits an RBF-kernel SVC on a 75/25
train/test split, and prints classification reports for both splits.

Reconstructed from a forum paste: Discuz watermark noise stripped from
every line; `sklearn.cross_validation` (removed in scikit-learn 0.20)
replaced with `sklearn.model_selection`; Python 2 print statements
converted to print() calls (identical output).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as
# hollow squares, so the decision regions are easy to compare later.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in
# 0.20; train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation (as in the book):
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")