Davis David (Davisy)
optuna.visualization.plot_optimization_history(study)
print(study.best_value)
print(study.best_params)
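joblib is imported in the setup snippet further down but not used in these excerpts; a minimal sketch of one natural use, persisting the finished study so it can be reloaded later (the filename is an assumption):

# save the study to disk and load it back (hypothetical filename)
joblib.dump(study, "randomforest_optuna_study.pkl")
study = joblib.load("randomforest_optuna_study.pkl")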
# pass the objective function to the optimize() method
study.optimize(objective, n_trials=10)
# create a study object
study = optuna.create_study(study_name="randomForest_optimization",
                            direction="maximize",
                            sampler=TPESampler())
# define the search space and the objective function
def objective(trial):
    # define the search space
    criterions = trial.suggest_categorical('criterion', ['gini', 'entropy'])
    max_depths = trial.suggest_int('max_depth', 1, 9, step=1)
    n_estimators = trial.suggest_int('n_estimators', 100, 1000, step=100)

    clf = RandomForestClassifier(n_estimators=n_estimators,
                                 criterion=criterions,
                                 max_depth=max_depths)

    # score the model with cross-validation and return the mean accuracy
    # (X_scaled and y are assumed to be prepared beforehand; see the setup snippet below)
    score = cross_val_score(clf, X_scaled, y, cv=3)
    return score.mean()
# import packages
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn import metrics
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import StandardScaler
import joblib
import optuna
from optuna.samplers import TPESampler
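The objective function above expects X_scaled and y to already exist. A minimal data-preparation sketch, using a built-in scikit-learn dataset as a stand-in for whatever data the original gist loaded:

# stand-in dataset (an assumption; the gist's real data source is not shown here)
from sklearn.datasets import load_breast_cancer

data = load_breast_cancer()
X, y = data.data, data.target

# scale the features so they share a comparable range
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)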
# plot convergence
from skopt.plots import plot_convergence
plot_convergence(result)
# summarize the findings
print('Best Accuracy: %.3f' % (result.fun))
print('Best Parameters: %s' % (result.x))
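For context, the result object used by plot_convergence() and the summary prints above would come from a scikit-optimize minimizer. A minimal sketch assuming gp_minimize, a search space mirroring the Optuna objective, and a negated-accuracy objective (all of these choices are assumptions, not necessarily the gist's exact setup; with this sign convention result.fun holds the negative of the best accuracy):

from skopt import gp_minimize
from skopt.space import Integer, Categorical
from skopt.utils import use_named_args

# search space mirroring the Optuna objective above (an assumption)
search_space = [
    Integer(100, 1000, name='n_estimators'),
    Integer(1, 9, name='max_depth'),
    Categorical(['gini', 'entropy'], name='criterion'),
]

@use_named_args(search_space)
def evaluate_model(**params):
    clf = RandomForestClassifier(**params)
    # gp_minimize minimizes, so return the negated mean cross-validation accuracy
    return -cross_val_score(clf, X_scaled, y, cv=3).mean()

# run Bayesian optimization; result.fun is the best objective value
# and result.x the corresponding parameter values
result = gp_minimize(evaluate_model, search_space, n_calls=20, random_state=42)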