import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed; train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
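The script depends on the utilities module that ships with the book's code repository (Python-Machine-Learning-Cookbook, Chapter03). If you only have the snippet above, a rough stand-in like the one below should be enough to run it; load_data and plot_classifier here are my own sketches (a comma-separated file with the class label in the last column, and a mesh-grid decision-region plot), not necessarily the book's exact implementation.

# utilities.py -- minimal stand-in for the book's helper module (assumed behavior)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: comma-separated rows, features first, label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Predict on a dense mesh over the 2-D feature space and draw the
    # resulting decision regions with the data points overlaid
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save this as utilities.py next to the script and the utilities.load_data / utilities.plot_classifier calls above resolve as written.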