Custom loss function in XGBoost

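A note on where the gradient and hessian in the snippet come from (derivation added for clarity, not part of the original answer): the objective is one half of the squared log error,

\[
L(\hat{y}, y) = \frac{1}{2}\bigl(\log(1+\hat{y}) - \log(1+y)\bigr)^2,
\]

and its first and second derivatives with respect to the prediction \(\hat{y}\) are

\[
\frac{\partial L}{\partial \hat{y}} = \frac{\log(1+\hat{y}) - \log(1+y)}{1+\hat{y}},
\qquad
\frac{\partial^2 L}{\partial \hat{y}^2} = \frac{1 - \log(1+\hat{y}) + \log(1+y)}{(1+\hat{y})^2},
\]

which is exactly what the gradient and hessian functions below return elementwise.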
import numpy as np
import xgboost as xgb
from typing import Tuple

def gradient(predt: np.ndarray, dtrain: xgb.DMatrix) -> np.ndarray:
    '''Compute the gradient of squared log error.'''
    y = dtrain.get_label()
    return (np.log1p(predt) - np.log1p(y)) / (predt + 1)

def hessian(predt: np.ndarray, dtrain: xgb.DMatrix) -> np.ndarray:
    '''Compute the hessian of squared log error.'''
    y = dtrain.get_label()
    return ((-np.log1p(predt) + np.log1p(y) + 1) /
            np.power(predt + 1, 2))

def squared_log(predt: np.ndarray,
                dtrain: xgb.DMatrix) -> Tuple[np.ndarray, np.ndarray]:
    '''Squared Log Error objective. A simplified version of RMSLE used as
    an objective function.
    '''
    # Clip predictions so that log1p(predt) stays defined (requires predt > -1).
    predt[predt < -1] = -1 + 1e-6
    grad = gradient(predt, dtrain)
    hess = hessian(predt, dtrain)
    return grad, hess
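A minimal usage sketch, not part of the original snippet: a custom objective is passed to xgb.train through its obj argument as a callable returning (grad, hess). The toy data and parameter values here are illustrative assumptions.

import numpy as np
import xgboost as xgb

# Hypothetical toy regression data with non-negative targets,
# which keeps log1p(y) well defined for squared log error.
rng = np.random.default_rng(0)
X = rng.random((100, 5))
y = rng.random(100)
dtrain = xgb.DMatrix(X, label=y)

# Train for a few rounds using the squared_log objective defined above.
booster = xgb.train(
    {"tree_method": "hist", "seed": 1994},
    dtrain=dtrain,
    num_boost_round=10,
    obj=squared_log,
)

Note that with a custom objective XGBoost reports raw (untransformed) scores, so any evaluation metric should be computed on the same scale the objective was derived for.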