Reference (Optuna's LightGBM Tuner, sketched at the end of this post): https://tech.preferred.jp/ja/blog/hyperparameter-tuning-with-optuna-integration-lightgbm-tuner/
pip install optuna
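The example below also uses LightGBM, NumPy, and scikit-learn; assuming they are not installed yet, they can be added the same way:
pip install lightgbm numpy scikit-learn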
import lightgbm as lgb
import numpy as np
import optuna
import sklearn.metrics

def objective(trial):
    # Prepare x_train, y_train, x_test, y_test here
    dtrain = lgb.Dataset(x_train, label=y_train)
    param = {
        'objective': 'binary',
        'metric': 'binary_logloss',
        'lambda_l1': trial.suggest_loguniform('lambda_l1', 1e-8, 10.0),
        'lambda_l2': trial.suggest_loguniform('lambda_l2', 1e-8, 10.0),
        'num_leaves': trial.suggest_int('num_leaves', 2, 256),
        'feature_fraction': trial.suggest_uniform('feature_fraction', 0.4, 1.0),
        'bagging_fraction': trial.suggest_uniform('bagging_fraction', 0.4, 1.0),
        'bagging_freq': trial.suggest_int('bagging_freq', 1, 7),
        'min_child_samples': trial.suggest_int('min_child_samples', 5, 100),
    }
    gbm = lgb.train(param, dtrain)
    preds = gbm.predict(x_test)
    pred_labels = np.rint(preds)
    # Compute the evaluation metric from pred_labels and y_test
    accuracy = sklearn.metrics.accuracy_score(y_test, pred_labels)
    return accuracy  # This time we search for parameters by accuracy

study = optuna.create_study(direction='maximize')
study.optimize(objective, n_trials=100)
print('Number of finished trials:', len(study.trials))
print('Best trial:', study.best_trial.params)
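Once the search has finished, the best parameters can be used to train a final model. A minimal sketch, assuming the same x_train/y_train as above and adding back the fixed objective/metric keys, which are not part of study.best_trial.params:
# Rebuild the training set and merge the tuned parameters with the fixed ones.
dtrain = lgb.Dataset(x_train, label=y_train)
best_params = {'objective': 'binary', 'metric': 'binary_logloss'}
best_params.update(study.best_trial.params)
final_model = lgb.train(best_params, dtrain)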
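The PFN post linked at the top covers the LightGBM Tuner, a drop-in replacement for lgb.train that searches these same parameters stepwise instead of requiring a hand-written objective(). A minimal sketch, assuming the optuna.integration.lightgbm module described there; the breast-cancer dataset and the train/test split are illustrative assumptions, not part of the original article:
import lightgbm as lgb
import optuna.integration.lightgbm as optuna_lgb
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

# Illustrative data; any binary-classification dataset works here.
data = load_breast_cancer()
x_train, x_test, y_train, y_test = train_test_split(
    data.data, data.target, test_size=0.25, random_state=0)

dtrain = lgb.Dataset(x_train, label=y_train)
dvalid = lgb.Dataset(x_test, label=y_test, reference=dtrain)

params = {'objective': 'binary', 'metric': 'binary_logloss', 'verbosity': -1}

# Drop-in replacement for lgb.train: tunes lambda_l1, lambda_l2, num_leaves,
# feature_fraction, bagging_fraction, bagging_freq and min_child_samples stepwise,
# using the validation set to score each step.
booster = optuna_lgb.train(params, dtrain, valid_sets=[dvalid])
print('Tuned params:', booster.params)
Note that the tuner searches this fixed set of parameters in a fixed order, whereas the objective() above samples all of them jointly on every trial with Optuna's default sampler.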