@@ -143,15 +143,16 @@ class LightGBM(object):
         params = {
             "objective": "binary",  # binary classification task
             "metric": "binary_logloss",  # evaluation metric: binary log loss
-            "num_leaves": 31,  # number of leaves per tree
-            "learning_rate": 0.005,  # learning rate
-            "bagging_fraction": 0.9,  # fraction of samples used to build each tree
-            "feature_fraction": 0.8,  # fraction of features used to build each tree
-            "bagging_freq": 5,  # perform bagging every k iterations
-            "num_threads": 4,  # number of threads
+            "num_leaves": 36,  # number of leaves per tree
+            "learning_rate": 0.08479152931388902,  # learning rate
+            "bagging_fraction": 0.6588121592044218,  # fraction of samples used to build each tree
+            "feature_fraction": 0.4572757903437793,  # fraction of features used to build each tree
+            "bagging_freq": 2,  # perform bagging every k iterations
+            "num_threads": 16,  # number of threads
+            "min_child_samples": 71  # minimum number of data points in one leaf
         }
         # Train the model
-        num_round = 2000
+        num_round = 100
         print("Starting training......")
         bst = lgb.train(params, train_data, num_round, valid_sets=[test_data])
         bst.save_model(self.model)
@@ -215,10 +216,10 @@ class LightGBM(object):

 if __name__ == "__main__":
     L = LightGBM()
-    study = optuna.create_study(direction='maximize')
-    study.optimize(L.bays_params, n_trials=100)
-    print('Number of finished trials:', len(study.trials))
-    print('Best trial:', study.best_trial.params)
-    # L.train_model()
-    # L.evaluate_model()
-    # L.feature_importance()
+    # study = optuna.create_study(direction='maximize')
+    # study.optimize(L.bays_params, n_trials=100)
+    # print('Number of finished trials:', len(study.trials))
+    # print('Best trial:', study.best_trial.params)
+    L.train_model()
+    L.evaluate_model()
+    L.feature_importance()
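
The hard-coded parameter values above come from the now commented-out Optuna study. Since the `bays_params` objective itself is not part of this diff, the following standalone sketch only illustrates how such a study could produce them: the synthetic dataset, the search ranges, and the AUC-maximizing objective are assumptions for illustration, not the project's actual implementation.

# Hypothetical sketch of an Optuna objective similar to bays_params.
# The dataset, search ranges, and load_dataset() helper are assumptions.
import lightgbm as lgb
import optuna
from sklearn.datasets import make_classification
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split


def load_dataset():
    # Placeholder data; the real project would load its own features and labels here.
    X, y = make_classification(n_samples=5000, n_features=20, random_state=42)
    return train_test_split(X, y, test_size=0.2, random_state=42)


def bays_params(trial):
    X_train, X_valid, y_train, y_valid = load_dataset()
    train_data = lgb.Dataset(X_train, label=y_train)
    valid_data = lgb.Dataset(X_valid, label=y_valid, reference=train_data)

    params = {
        "objective": "binary",
        "metric": "binary_logloss",
        "verbosity": -1,
        "num_threads": 16,
        # Search ranges are illustrative; the tuned values in the diff fall inside them.
        "num_leaves": trial.suggest_int("num_leaves", 16, 64),
        "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
        "bagging_fraction": trial.suggest_float("bagging_fraction", 0.4, 1.0),
        "feature_fraction": trial.suggest_float("feature_fraction", 0.4, 1.0),
        "bagging_freq": trial.suggest_int("bagging_freq", 1, 7),
        "min_child_samples": trial.suggest_int("min_child_samples", 5, 100),
    }
    bst = lgb.train(params, train_data, num_boost_round=100, valid_sets=[valid_data])
    preds = bst.predict(X_valid)
    return roc_auc_score(y_valid, preds)  # maximized by the study (direction='maximize')


if __name__ == "__main__":
    study = optuna.create_study(direction="maximize")
    study.optimize(bays_params, n_trials=100)
    print("Best trial:", study.best_trial.params)

Note that raising the learning rate from 0.005 to roughly 0.085 means far fewer boosting rounds are needed to converge, which is consistent with num_round dropping from 2000 to 100 in the diff.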