- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled points from a text file, plots the two classes, fits an
RBF-kernel SVC on a 75/25 train/test split, visualizes the decision
boundary on both splits, and prints classification reports.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + class label per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection provides the same train_test_split API.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
# Parenthesized print with a single string argument is valid in both
# Python 2 and Python 3.
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|