import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train test split and SVM training
# (sklearn.cross_validation was removed in newer scikit-learn; use model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# Sort the labels so target_names line up with the report's row order
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
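The script relies on the local utilities helper module that ships with the book's repository, which is not included in this post. Below is a minimal sketch of the two helpers it calls, assuming load_data reads comma-separated rows with the class label in the last column and plot_classifier shades the decision regions over a mesh grid; the names and details are assumptions, not the book's exact code.

# utilities.py -- minimal sketch of the two helpers used above (assumed, not the original)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows where the last column is the class label
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            data = [float(x) for x in line.strip().split(',')]
            X.append(data[:-1])
            y.append(data[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title=''):
    # Predict on a dense mesh grid and shade the resulting decision regions
    min_x, max_x = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    min_y, max_y = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    x_vals, y_vals = np.meshgrid(np.arange(min_x, max_x, 0.01),
                                 np.arange(min_y, max_y, 0.01))
    mesh_output = classifier.predict(np.c_[x_vals.ravel(), y_vals.ravel()])
    mesh_output = mesh_output.reshape(x_vals.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(x_vals, y_vals, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(x_vals.min(), x_vals.max())
    plt.ylim(y_vals.min(), y_vals.max())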
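The two commented-out params lines invite trying the linear and polynomial kernels as well. A small, hypothetical comparison loop, reusing the X_train/X_test split defined above, makes the difference between the three kernel choices explicit:

from sklearn.metrics import accuracy_score

# Hypothetical comparison over the three kernel configurations mentioned in the script
kernel_configs = [
    {'kernel': 'linear'},
    {'kernel': 'poly', 'degree': 3},
    {'kernel': 'rbf'},
]
for params in kernel_configs:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy:', round(acc, 3))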