Skip to content

Instantly share code, notes, and snippets.

@WillKoehrsen
Last active March 14, 2019 06:28
Show Gist options
  • Save WillKoehrsen/f42714a0458b5b109a542e73258ee224 to your computer and use it in GitHub Desktop.
import lightgbm as lgb
from hyperopt import STATUS_OK
# Number of cross-validation folds used by the `objective` function below.
N_FOLDS = 10
# Create the dataset
# NOTE(review): assumes `train_features` and `train_labels` are defined
# earlier in the file (not visible in this snippet) — confirm against caller.
train_set = lgb.Dataset(train_features, train_labels)
def objective(params, n_folds = N_FOLDS):
    """Objective function for Gradient Boosting Machine hyperparameter tuning.

    Runs n-fold cross-validation with early stopping on the module-level
    ``train_set`` and converts the best mean ROC AUC into a loss suitable
    for minimization by Hyperopt.

    Parameters
    ----------
    params : dict
        LightGBM hyperparameters to evaluate.
    n_folds : int, optional
        Number of cross-validation folds (default: ``N_FOLDS``).

    Returns
    -------
    dict
        ``{'loss': 1 - best_auc, 'params': params, 'status': STATUS_OK}`` —
        the result format Hyperopt's ``fmin`` expects.
    """
    # Perform n-fold cross validation with the given hyperparameters.
    # Use early stopping and evaluate based on ROC AUC.
    # NOTE(review): `early_stopping_rounds` was removed from lgb.cv in
    # LightGBM 4.x; newer versions require
    # callbacks=[lgb.early_stopping(100)] instead — confirm installed version.
    cv_results = lgb.cv(params, train_set, nfold = n_folds, num_boost_round = 10000,
                        early_stopping_rounds = 100, metrics = 'auc', seed = 50)

    # Extract the best (highest) mean AUC across boosting rounds.
    best_score = max(cv_results['auc-mean'])

    # Hyperopt minimizes, so turn the score into a loss.
    loss = 1 - best_score

    # Dictionary with the information Hyperopt needs for evaluation.
    return {'loss': loss, 'params': params, 'status': STATUS_OK}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment