Skip to content

Instantly share code, notes, and snippets.

@3catz
Last active May 29, 2021 09:22
Show Gist options
  • Select an option

  • Save 3catz/1b14c9e05ca8d9c9cb911bb1032eb18f to your computer and use it in GitHub Desktop.

Select an option

Save 3catz/1b14c9e05ca8d9c9cb911bb1032eb18f to your computer and use it in GitHub Desktop.
catboost_HPopt_CV
from catboost import Pool, cv, CatBoostClassifier
from bayes_opt import BayesianOptimization
from sklearn.model_selection import *
from sklearn.metrics import *
def CB_opt(n_estimators, depth, learning_rate, max_bin,
           subsample, num_leaves, l2_leaf_reg, model_size_reg):
    """Cross-validated objective function for Bayesian hyperparameter search.

    Trains a CatBoostClassifier with the supplied hyperparameters under
    5-fold stratified cross-validation and returns the mean Matthews
    correlation coefficient across folds — the quantity BayesianOptimization
    maximizes.

    NOTE(review): this function reads module-level globals X1 (features)
    and Y1 (labels) that are never defined in this snippet — presumably a
    pandas DataFrame/Series given the .iloc indexing; they must exist
    before the optimizer runs. TODO: confirm with the caller.

    BayesianOptimization samples every parameter as a float, so the
    integer-valued ones (n_estimators, depth, num_leaves, max_bin) are
    cast to int before being handed to CatBoost.

    Returns:
        float: mean Matthews correlation coefficient over the 5 folds.
    """
    scores = []
    skf = StratifiedKFold(n_splits=5, shuffle=True, random_state=1944)
    for train_index, test_index in skf.split(X1, Y1):
        trainx, valx = X1.iloc[train_index], X1.iloc[test_index]
        trainy, valy = Y1.iloc[train_index], Y1.iloc[test_index]
        reg = CatBoostClassifier(
            verbose=0,
            n_estimators=int(n_estimators),
            learning_rate=learning_rate,
            subsample=subsample,
            l2_leaf_reg=l2_leaf_reg,
            max_depth=int(depth),
            num_leaves=int(num_leaves),
            random_state=88,
            grow_policy="Lossguide",  # num_leaves only takes effect with Lossguide
            max_bin=int(max_bin),
            use_best_model=True,  # keep the iteration scoring best on eval_set
            model_size_reg=model_size_reg,
        )
        reg.fit(trainx, trainy, eval_set=(valx, valy))
        scores.append(matthews_corrcoef(valy, reg.predict(valx)))
    # Plain-Python mean: the original returned np.mean(scores), but numpy
    # is never imported in this file, which raises NameError at runtime.
    return sum(scores) / len(scores)
# Search space handed to BayesianOptimization: each key must match a
# CB_opt parameter name exactly, and maps to a (low, high) interval.
pbounds = {
    "n_estimators": (150, 400),
    "depth": (2, 7),
    "learning_rate": (0.01, 0.2),
    "subsample": (0.6, 1.0),
    "num_leaves": (16, 40),
    "max_bin": (150, 300),
    "l2_leaf_reg": (0, 10),
    "model_size_reg": (0, 10),
}
# Maximize the cross-validated MCC returned by CB_opt over `pbounds`.
optimizer = BayesianOptimization(
    f=CB_opt,
    pbounds=pbounds,
    verbose=2,         # print every probed point
    random_state=888,  # reproducible acquisition sampling
)

# 2 random warm-up evaluations, then 20 Bayesian-guided iterations.
optimizer.maximize(init_points=2, n_iter=20)

# Best observed target value and the hyperparameters that produced it.
print(optimizer.max)
@nithin-uppalapati
Copy link
Copy Markdown

nithin-uppalapati commented May 29, 2021

Something's wrong with the CB_opt function. Please check: X1 and Y1 are referenced inside CB_opt but never defined in the gist (presumably the global feature matrix and label vector), and `np.mean` is called without `import numpy as np` anywhere in the file.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment