import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# (sklearn.model_selection replaces the old sklearn.cross_validation module)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternative kernels to compare decision boundaries
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()
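The script imports `utilities`, a helper module that ships alongside the book's Chapter03 code but is not shown in this post. If you don't have that file, the following is a minimal sketch of what the two helpers are assumed to do, inferred only from how the script calls them (the repository's actual implementation may differ): `load_data` reads comma-separated rows with the class label in the last column, and `plot_classifier` shades the decision regions of a fitted 2-D classifier and overlays the data points.

# Minimal stand-ins for utilities.load_data / utilities.plot_classifier
# (an approximation based on how the script calls them, not the book's exact code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Each row: feature1,feature2,...,label  (comma-separated, label in the last column)
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the fitted classifier on a dense 2-D mesh and shade the predicted regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With the RBF kernel the plotted boundary is non-linear; switching `params` to the commented-out 'linear' or 'poly' entries and rerunning shows how the kernel choice changes both the plotted regions and the classification report.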