@@ -134,12 +134,12 @@ class LightGBM(object):
         )
         test_data = lgb.Dataset(X_test, label=Y_test, reference=train_data)
         params = {
-            'num_leaves': 37,
-            'learning_rate': 0.00026047995636150966,
-            'feature_fraction': 0.8289909088251219,
-            'bagging_fraction': 0.40745822386921504,
+            'num_leaves': 31,
+            'learning_rate': 0.00020616904432655601,
+            'feature_fraction': 0.6508847259863764,
+            'bagging_fraction': 0.7536774652478249,
             'bagging_freq': 6,
-            'min_child_samples': 15,
+            'min_child_samples': 99,
             'num_threads': 16
         }

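The hunk above only swaps the values inside the `params` dict; the training call that consumes them is outside the diff. As rough orientation, a minimal self-contained sketch of the usual pattern follows. The binary objective, AUC metric, synthetic data, boosting-round count, and the early-stopping callback (available in lightgbm >= 3.3) are illustration-only assumptions, not part of this change.

# Minimal sketch of how a params dict like the one above is typically
# consumed by LightGBM. Objective, metric, data, and num_boost_round are
# assumed for illustration; they are not taken from this diff.
import lightgbm as lgb
import numpy as np

X_train = np.random.rand(500, 10)
Y_train = np.random.randint(0, 2, 500)
X_test = np.random.rand(100, 10)
Y_test = np.random.randint(0, 2, 100)

train_data = lgb.Dataset(X_train, label=Y_train)
test_data = lgb.Dataset(X_test, label=Y_test, reference=train_data)

params = {
    'objective': 'binary',   # assumed; the hunk does not show the objective
    'metric': 'auc',         # assumed metric
    'num_leaves': 31,
    'learning_rate': 0.00020616904432655601,
    'feature_fraction': 0.6508847259863764,
    'bagging_fraction': 0.7536774652478249,
    'bagging_freq': 6,
    'min_child_samples': 99,
    'num_threads': 16
}

# Train with early stopping on the held-out set (lightgbm >= 3.3 callback API).
booster = lgb.train(
    params,
    train_data,
    num_boost_round=1000,
    valid_sets=[test_data],
    callbacks=[lgb.early_stopping(stopping_rounds=50)]
)
print('Best iteration:', booster.best_iteration)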
@@ -210,20 +210,20 @@ class LightGBM(object):


 if __name__ == "__main__":
-    # i = int(input("Enter 1 to train, 2 to predict:\n"))
-    # if i == 1:
-    #     f = "train"
-    #     dt = "whole"
-    #     L = LightGBM(flag=f, dt=dt)
-    #     L.train_model()
-    # elif i == 2:
-    #     f = "predict"
-    #     dt = int(input("Enter a date, 16-21:\n"))
-    #     L = LightGBM(flag=f, dt=dt)
-    #     L.evaluate_model()
-    #     L.feature_importance()
-    L = LightGBM("train", "whole")
-    study = optuna.create_study(direction='maximize')
-    study.optimize(L.bays_params, n_trials=100)
-    print('Number of finished trials:', len(study.trials))
-    print('Best trial:', study.best_trial.params)
+    i = int(input("Enter 1 to train, 2 to predict:\n"))
+    if i == 1:
+        f = "train"
+        dt = "whole"
+        L = LightGBM(flag=f, dt=dt)
+        L.train_model()
+    elif i == 2:
+        f = "predict"
+        dt = int(input("Enter a date, 16-21:\n"))
+        L = LightGBM(flag=f, dt=dt)
+        L.evaluate_model()
+        L.feature_importance()
+    # L = LightGBM("train", "whole")
+    # study = optuna.create_study(direction='maximize')
+    # study.optimize(L.bays_params, n_trials=100)
+    # print('Number of finished trials:', len(study.trials))
+    # print('Best trial:', study.best_trial.params)
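The Optuna block commented out in the second hunk optimizes `L.bays_params`, whose body is not shown in this diff. For reference, here is a hypothetical sketch of what such an objective commonly looks like for LightGBM. The search ranges, AUC metric, and synthetic data are assumptions, chosen only to loosely bracket the sampled values that appear in both hunks; they do not reproduce the repository's actual method.

# Hypothetical Optuna objective in the spirit of bays_params (assumed, not
# the real method). Ranges are assumptions loosely inferred from the values
# seen in the two hunks above.
import lightgbm as lgb
import numpy as np
import optuna
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split

X, y = np.random.rand(1000, 10), np.random.randint(0, 2, 1000)
X_tr, X_va, y_tr, y_va = train_test_split(X, y, test_size=0.2, random_state=42)

def objective(trial):
    params = {
        'objective': 'binary',   # assumed objective
        'num_leaves': trial.suggest_int('num_leaves', 16, 128),
        'learning_rate': trial.suggest_float('learning_rate', 1e-4, 1e-1, log=True),
        'feature_fraction': trial.suggest_float('feature_fraction', 0.4, 1.0),
        'bagging_fraction': trial.suggest_float('bagging_fraction', 0.4, 1.0),
        'bagging_freq': trial.suggest_int('bagging_freq', 1, 10),
        'min_child_samples': trial.suggest_int('min_child_samples', 5, 100),
        'num_threads': 16,
        'verbose': -1
    }
    train_set = lgb.Dataset(X_tr, label=y_tr)
    booster = lgb.train(params, train_set, num_boost_round=200)
    preds = booster.predict(X_va)
    # Return validation AUC; maximized, matching direction='maximize' below.
    return roc_auc_score(y_va, preds)

study = optuna.create_study(direction='maximize')
study.optimize(objective, n_trials=100)
print('Number of finished trials:', len(study.trials))
print('Best trial:', study.best_trial.params)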