- import numpy as np
- import xgboost as xgb
class XGBModel:
    """Thin wrapper around a trained XGBoost booster loaded from a file.

    Holds an ordered list of feature names so that callers can score a
    plain ``{feature_name: value}`` dict and read importances keyed by
    feature name rather than by positional slot.
    """

    def __init__(self, model_file, features: list):
        # Path retained for debugging; the booster is loaded eagerly so a
        # bad path fails fast at construction time.
        self.model_file = model_file
        self.model = xgb.Booster(model_file=model_file)
        # Ordered feature names; position i corresponds to model slot "f<i>".
        self.features = features

    def predict(self, feature_map: dict) -> float:
        """Score a single example given as ``{feature_name: value}``.

        Features absent from ``feature_map`` default to 0.0, which is also
        the DMatrix ``missing`` marker — so absent features are treated as
        missing by the booster rather than as literal zeros.

        Returns the booster's probability/value output (``output_margin``
        disabled) for the single row.
        """
        values = np.array([
            float(feature_map.get(feature, 0.0))
            for feature in self.features
        ])
        dm = xgb.DMatrix(values.reshape(1, -1), missing=0.0)
        return float(self.model.predict(dm, output_margin=False)[0])

    def feature_weight_importance(self):
        """Importance by number of times each feature is used to split."""
        return self.feature_importance("weight")

    def feature_cover_importance(self):
        """Importance by average coverage of splits using each feature."""
        return self.feature_importance("cover")

    def feature_gain_importance(self):
        """Importance by average gain of splits using each feature."""
        return self.feature_importance("gain")

    def feature_importance(self, importance_type: str) -> dict:
        """Return ``{feature_name: score}`` for the given importance type.

        XGBoost's ``get_score`` reports keys as ``"f<i>"`` only when the
        booster has no stored feature names; those slots are mapped back
        through ``self.features``. Boosters trained with real feature
        names return those names directly — pass them through unchanged.
        (The previous implementation did ``int(key[1:])`` unconditionally
        and raised ``ValueError`` on real-name keys.)
        """
        importance_map = {}
        score_map = self.model.get_score(importance_type=importance_type)
        for key, score in score_map.items():
            if key.startswith("f") and key[1:].isdigit():
                # Positional slot "f<i>" -> configured feature name.
                name = self.features[int(key[1:])]
            else:
                name = key
            importance_map[name] = score
        return importance_map