grid_search_model_selection.py

# -*- coding: utf-8 -*-
from time import time
from scipy.stats import randint
import numpy as np
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
# sklearn.grid_search and sklearn.cross_validation were removed in
# scikit-learn 0.20; model_selection is the current home of these APIs.
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV

def report(cv_results, n_top=1):
    """Report the top n_top parameter settings, default n_top=1.

    Args
    ----
    cv_results -- the cv_results_ dict of a grid or randomized search
    n_top -- how many of the top models to report

    Returns
    -------
    [score_mean, score_std, best_parameters] -- mean validation score,
        its standard deviation, and the parameter dict of the best
        setting found in the search
    """
    # Rank the candidates by mean validation score, best first.
    order = np.argsort(cv_results["mean_test_score"])[::-1][:n_top]
    for rank, i in enumerate(order, start=1):
        print("Model with rank: {0}".format(rank))
        print(("Mean validation score: "
               "{0:.3f} (std: {1:.3f})").format(
                   cv_results["mean_test_score"][i],
                   cv_results["std_test_score"][i]))
        print("Parameters: {0}".format(cv_results["params"][i]))
        print("")

    # Return the best candidate, not the last one printed.
    best = order[0]
    score_mean = cv_results["mean_test_score"][best]
    score_std = cv_results["std_test_score"][best]
    best_parameters = cv_results["params"][best]
    return [score_mean, score_std, best_parameters]


def gridsearch(X, y, clf, param_grid, scoring="accuracy", cv=10):
    """Run a grid search for the best parameters of a classifier.

    Args
    ----
    X -- features
    y -- targets (classes)
    clf -- a scikit-learn classifier
    param_grid -- [dict] parameter settings to test
    scoring -- scoring metric, default "accuracy"
    cv -- number of cross-validation folds, default 10

    Returns
    -------
    [top_params, grid_search] -- the report() summary of the best
        setting and the fitted GridSearchCV object
    """
    grid_search = GridSearchCV(clf,
                               param_grid=param_grid,
                               scoring=scoring,
                               cv=cv)
    start = time()
    grid_search.fit(X, y)
    print(("\nGridSearchCV took {:.2f} "
           "seconds for {:d} candidate "
           "parameter settings.").format(
               time() - start,
               len(grid_search.cv_results_["params"])))

    top_params = report(grid_search.cv_results_)
    return [top_params, grid_search]
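
# A randomized-search counterpart -- a sketch added here because the file
# imports RandomizedSearchCV and scipy.stats.randint without using them.
# Instead of testing the full grid, n_iter candidates are sampled from
# param_dist, e.g. param_dist={"max_depth": randint(1, 11)}.
def randomsearch(X, y, clf, param_dist, n_iter=20,
                 scoring="accuracy", cv=10):
    random_search = RandomizedSearchCV(clf,
                                       param_distributions=param_dist,
                                       n_iter=n_iter,
                                       scoring=scoring,
                                       cv=cv)
    start = time()
    random_search.fit(X, y)
    print(("\nRandomizedSearchCV took {:.2f} seconds "
           "for {:d} sampled settings.").format(
               time() - start, n_iter))
    top_params = report(random_search.cv_results_)
    return [top_params, random_search]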



def get_best_model(classifier_families, X, y):
    """Grid-search every classifier family and return the best one.

    classifier_families is a list of (name, estimator, param_grid)
    tuples; the family with the highest mean validation score wins.
    """
    best_quality = 0.0
    best_classifier = None
    classifiers = []
    for name, model, parameters in classifier_families:
        classifiers.append([name] + gridsearch(X, y, model, parameters))

    for name, scores, clf in classifiers:
        if scores[0] > best_quality:
            best_quality = scores[0]
            best_classifier = [name, scores, clf]

    return best_classifier
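

# Usage sketch, not from the original post: the iris dataset and the
# parameter grids below are illustrative assumptions. It shows the
# (name, estimator, param_grid) structure that get_best_model expects.
if __name__ == "__main__":
    from sklearn.datasets import load_iris

    iris = load_iris()
    X, y = iris.data, iris.target

    classifier_families = [
        ("DecisionTree", DecisionTreeClassifier(),
         {"max_depth": [3, 5, None], "min_samples_split": [2, 10]}),
        ("KNN", KNeighborsClassifier(),
         {"n_neighbors": [3, 5, 7]}),
        ("RandomForest", RandomForestClassifier(n_estimators=50),
         {"max_depth": [3, None], "min_samples_split": [2, 10]}),
    ]

    name, (score_mean, score_std, best_params), search = \
        get_best_model(classifier_families, X, y)
    print("Best family: {0} ({1:.3f} +/- {2:.3f})".format(
        name, score_mean, score_std))
    print("Best parameters: {0}".format(best_params))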



Source: www.cnblogs.com/ffeng0312/p/10153539.html