# XGBModel.py
  1. import numpy as np
  2. import xgboost as xgb
  3. class XGBModel(object):
  4. def __init__(self, model_file, features: list):
  5. self.model_file = model_file
  6. self.model = xgb.Booster(model_file=model_file)
  7. self.features = features
  8. def predict(self, feature_map: dict) -> float:
  9. values = np.array([
  10. float(feature_map.get(feature, 0.0))
  11. for feature in self.features
  12. ])
  13. dm = xgb.DMatrix(values.reshape(1, -1), missing=0.0)
  14. return float(self.model.predict(dm, output_margin=False)[0])
  15. def feature_weight_importance(self):
  16. return self.feature_importance("weight")
  17. def feature_cover_importance(self):
  18. return self.feature_importance("cover")
  19. def feature_gain_importance(self):
  20. return self.feature_importance("gain")
  21. def feature_importance(self, importance_type: str):
  22. importance_map = {}
  23. score_map = self.model.get_score(importance_type=importance_type)
  24. for key in score_map:
  25. k = self.features[int(key[1:])]
  26. importance_map[k] = score_map[key]
  27. return importance_map