import numpy as np
import xgboost as xgb

Advanced customised loss function

This example shows how to train XGBoost with a user-defined objective (logistic loss) and a matching user-defined evaluation metric on the agaricus dataset.

print('Start running example to use customised objective function.')
Start running example to use customised objective function.
dtrain = xgb.DMatrix('./demo/data/agaricus.txt.train')
dtest = xgb.DMatrix('./demo/data/agaricus.txt.test')
param = {'max_depth': 2, 'eta': 1, 'silent': 1}
watchlist = [(dtest, 'eva'), (dtrain, 'train')]
num_round = 2
def logregobj(preds, dtrain):
    labels = dtrain.get_label()
    # preds are raw margin scores, not probabilities: with a custom objective
    # XGBoost passes the untransformed model output, so we apply the sigmoid
    # ourselves to recover probabilities before computing the derivatives
    preds = 1.0 / (1.0 + np.exp(-preds))
    grad = preds - labels          # first derivative of the logistic loss w.r.t. the margin
    hess = preds * (1.0 - preds)   # second derivative of the logistic loss
    return grad, hess
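As a quick sanity check (a sketch that is not part of the original example; eps and the sample values are arbitrary), the grad and hess above can be verified against finite differences of the logistic loss l(x) = -y*log(p) - (1-y)*log(1-p) with p = sigmoid(x):

def logloss(x, y):
    p = 1.0 / (1.0 + np.exp(-x))
    return -y * np.log(p) - (1.0 - y) * np.log(1.0 - p)

eps, x, y = 1e-4, 0.5, 1.0
p = 1.0 / (1.0 + np.exp(-x))
num_grad = (logloss(x + eps, y) - logloss(x - eps, y)) / (2 * eps)
num_hess = (logloss(x + eps, y) - 2 * logloss(x, y) + logloss(x - eps, y)) / eps ** 2
print(num_grad, p - y)          # both approximately p - y
print(num_hess, p * (1.0 - p))  # both approximately p * (1 - p)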
def evalerror(preds, dtrain):
    labels = dtrain.get_label()
    # preds are raw margins here as well: a margin above 0 corresponds to a
    # predicted probability above 0.5, so this is the usual error rate
    return 'error', float(sum(labels != (preds > 0.0)) / len(labels))
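To see the metric in isolation (a sketch with arbitrary toy values, not in the original), evalerror can be exercised on a small hand-built DMatrix; two of the four margins fall on the wrong side of 0, giving an error of 0.5:

toy = xgb.DMatrix(np.zeros((4, 1)), label=np.array([1, 0, 1, 0]))
print(evalerror(np.array([0.3, 0.2, -0.1, -0.4]), toy))  # ('error', 0.5)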
bst = xgb.train(param, dtrain, num_round, watchlist, logregobj, evalerror)
[0]    eva-rmse:1.59229    train-rmse:1.59597    eva-error:0.042831    train-error:0.046522
[1]    eva-rmse:2.40519    train-rmse:2.40977    eva-error:0.021726    train-error:0.022263
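The rmse columns come from the default metric, which is evaluated on the raw margin scores rather than probabilities, so their growth across rounds is expected and only the custom error column is meaningful here. For the same reason, a booster trained with a custom objective returns raw margins from predict(), and the sigmoid has to be applied manually to obtain probabilities (a sketch using the bst and dtest objects from above):

margin = bst.predict(dtest)            # raw margin scores, not probabilities
prob = 1.0 / (1.0 + np.exp(-margin))   # map margins to probabilities by hand
print(prob[:5])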
