How to use the xgboost.Booster class in xgboost

To help you get started, we've selected a few xgboost.Booster examples, drawn from popular ways it is used in public projects.
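
Across these projects the pattern is the same: construct a Booster (optionally passing runtime parameters such as nthread), restore a trained model with load_model, and call predict on a DMatrix. A minimal, self-contained sketch of that pattern (the file name model.bin and the random features are placeholders):

import numpy as np
import xgboost as xgb

# restore a previously trained model; 'model.bin' is a placeholder path
bst = xgb.Booster({'nthread': 4})
bst.load_model('model.bin')

# predictions always go through a DMatrix
X = np.random.rand(5, 10)  # placeholder feature matrix
preds = bst.predict(xgb.DMatrix(X))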


github chintak / fast-hair-segmentation / utils.py
def hr_name_to_models(mnames):
    model_feats = []
    if not mnames: return []
    for model_fname in mnames.split(','):
        model_type, feat_type = get_model_feature_type(model_fname)
        bst = xgb.Booster(params=getattr(configs, model_type)(val=True))
        bst.load_model(model_fname)
        Conf = getattr(configs, feat_type)()
        featurize = Conf['FEATS']
        window = Conf['WINDOW']
        model_feats.append((featurize, bst, window))
    return model_feats

github neptune-ml / steppy-toolkit / toolkit / xgboost_transformers / models.py
def load(self, filepath):
        self.estimator = xgb.Booster(params=self.booster_parameters)
        self.estimator.load_model(filepath)
        return self

github TeamHG-Memex / eli5 / eli5 / xgboost.py
@explain_prediction.register(Booster)
def explain_prediction_xgboost(
        xgb, doc,
        vec=None,
        top=None,
        top_targets=None,
        target_names=None,
        targets=None,
        feature_names=None,
        feature_re=None,  # type: Pattern[str]
        feature_filter=None,
        vectorized=False,  # type: bool
        is_regression=None,  # type: bool
        missing=None,  # type: bool
        ):
    """ Return an explanation of XGBoost prediction (via scikit-learn wrapper
    XGBClassifier or XGBRegressor, or via xgboost.Booster) as feature weights.
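
For context, a hedged sketch of invoking this handler through eli5's public entry point; the toy model, the made-up feature names, and top=10 are illustrative, and it assumes an eli5 release compatible with your xgboost version:

import numpy as np
import xgboost as xgb
import eli5

# train a tiny regressor so there is something to explain
X = np.random.rand(100, 4)
y = 2.0 * X[:, 0]
bst = xgb.train({'objective': 'reg:squarederror'},
                xgb.DMatrix(X, label=y), num_boost_round=10)

# explain a single prediction as feature weights;
# a single-row slice keeps the input 2-D for DMatrix
expl = eli5.explain_prediction(bst, X[:1],
                               feature_names=['f0', 'f1', 'f2', 'f3'], top=10)
print(eli5.format_as_text(expl))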

github HouJP / kaggle-quora-question-pairs / houjp / bin / model.py
        model_fp = cf.get('DEFAULT', 'model_pt') + '/xgboost.model'
        # the individual XGBOOST_PARAMS reads (objective, eval_metric, eta,
        # max_depth, subsample, colsample_bytree, min_child_weight, silent,
        # num_round, early_stop, nthread, best_ntree_limit) are wrapped up in:
        params = Model.get_parameters_xgb(cf)
        model = xgb.Booster(params)
        model.load_model(model_fp)

        return model, params

github dmlc / xgboost / demo / kaggle-higgs / higgs-pred.py
import numpy as np
import xgboost as xgb

# path to where the data lies
dpath = 'data'

modelfile = 'higgs.model'
outfile = 'higgs.pred.csv'
# mark the top 15% as positive
threshold_ratio = 0.15

# load the test data directly with numpy
dtest = np.loadtxt(dpath + '/test.csv', delimiter=',', skiprows=1)
data = dtest[:, 1:31]
idx = dtest[:, 0]

print('finished loading from csv')
xgmat = xgb.DMatrix(data, missing=-999.0)
bst = xgb.Booster({'nthread': 16}, model_file=modelfile)
ypred = bst.predict(xgmat)

res = [(int(idx[i]), ypred[i]) for i in range(len(ypred))]

# rank events by predicted score, highest first
rorder = {}
for k, v in sorted(res, key=lambda x: -x[1]):
    rorder[k] = len(rorder) + 1

# write out predictions
ntop = int(threshold_ratio * len(rorder))
fo = open(outfile, 'w')
fo.write('EventId,RankOrder,Class\n')
for k, v in res:
    # label the top-ranked events as signal ('s'), the rest as background ('b')
    lb = 's' if rorder[k] <= ntop else 'b'
    fo.write('%s,%d,%s\n' % (k, len(rorder) + 1 - rorder[k], lb))
fo.close()

github DerThorsten / nifty / deprecated / segmentation_pipeline / tools.py
def load(self, fname):
        self.bst = xgb.Booster({'nthread': self.nThreads})  # init an empty model
        self.bst.load_model(fname)  # restore the trained trees

github bentoml / BentoML / bentoml / artifact / xgboost_model_artifact.py
def load(self, path):
        try:
            import xgboost as xgb
        except ImportError:
            raise MissingDependencyException(
                "xgboost package is required to use XgboostModelArtifact"
            )
        bst = xgb.Booster()
        bst.load_model(self._model_file_path(path))

        return self.pack(bst)

github aws / sagemaker-xgboost-container / src / sagemaker_xgboost_container / algorithm_mode / server_handler.py
def default_model_fn(self, model_dir):
        model_file = os.listdir(model_dir)[0]
        try:
            booster = pkl.load(open(os.path.join(model_dir, model_file), 'rb'))
            format = 'pkl_format'
        except Exception as exp_pkl:
            try:
                booster = xgb.Booster()
                booster.load_model(os.path.join(model_dir, model_file))
                format = 'xgb_format'
            except Exception as exp_xgb:
                raise ModelLoadInferenceError("Unable to load model: %s %s" % (exp_pkl, exp_xgb))
        booster.set_param('nthread', 1)
        return booster, format
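
The try/except above reflects the two ways a trained Booster commonly lands on disk: as a pickled Python object, or in xgboost's native format. A minimal sketch of producing both (the file names are placeholders):

import pickle
import numpy as np
import xgboost as xgb

# train a throwaway model so the sketch is self-contained
X = np.random.rand(20, 3)
y = np.random.randint(0, 2, size=20)
bst = xgb.train({'objective': 'binary:logistic'},
                xgb.DMatrix(X, label=y), num_boost_round=5)

bst.save_model('model.xgb')           # native format, restored via Booster.load_model
with open('model.pkl', 'wb') as f:    # pickled Booster, restored via pickle.load
    pickle.dump(bst, f)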

github IBM / adversarial-robustness-toolbox / art / classifiers / xgboost.py
def nb_classes(self):
        """
        Return the number of output classes.

        :return: Number of classes in the data.
        :rtype: `int`
        """
        from xgboost import Booster, XGBClassifier
        if isinstance(self._model, Booster):
            try:
                return int(len(self._model.get_dump(dump_format='json')) / self._model.n_estimators)
            except AttributeError:
                if self._nb_classes is not None:
                    return self._nb_classes
                raise NotImplementedError('Number of classes cannot be determined automatically. ' +
                                          'Please manually set argument nb_classes in XGBoostClassifier.')

        if isinstance(self._model, XGBClassifier):
            return self._model.n_classes_

        return None
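
The get_dump arithmetic above relies on a detail of multiclass boosting: xgboost grows one tree per class per boosting round, so the number of dumped trees equals rounds times classes. A quick self-contained check of that invariant (the toy data and parameter values are illustrative):

import numpy as np
import xgboost as xgb

# three-class toy problem
X = np.random.rand(60, 4)
y = np.random.randint(0, 3, size=60)
num_round = 7
bst = xgb.train({'objective': 'multi:softprob', 'num_class': 3},
                xgb.DMatrix(X, label=y), num_boost_round=num_round)

# one tree per class per round, so:
assert len(bst.get_dump()) == num_round * 3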