Yes, it is possible to tune. For example, you can supply a custom pairwise exponential loss as the objective: XGBoost expects a callable that takes (preds, dtrain) and returns the per-row gradient and Hessian:
import numpy as np

class ExponentialPairwiseLoss(object):
    """Pairwise exponential loss: sum of exp(pred_j - pred_i) over pairs
    (i, j) within a query group where label_i > label_j."""

    def __init__(self, groups):
        # groups: list of group sizes, in the same row order as dtrain
        self.groups = groups

    def __call__(self, preds, dtrain):
        labels = dtrain.get_label().astype(int)  # np.int is removed in NumPy >= 1.24
        rk = len(np.bincount(labels))            # number of distinct relevance levels
        plus_exp = np.exp(preds)
        minus_exp = np.exp(-preds)
        grad = np.zeros(preds.shape)
        hess = np.zeros(preds.shape)
        pos = 0
        for size in self.groups:
            # accumulate exp(+pred) and exp(-pred) per relevance level in the group
            sum_plus_exp = np.zeros((rk,))
            sum_minus_exp = np.zeros((rk,))
            for i in range(pos, pos + size):
                sum_plus_exp[labels[i]] += plus_exp[i]
                sum_minus_exp[labels[i]] += minus_exp[i]
            for i in range(pos, pos + size):
                # pairs where i should outrank a lower label, plus pairs where a
                # higher label should outrank i
                grad[i] = -minus_exp[i] * np.sum(sum_plus_exp[:labels[i]]) + \
                          plus_exp[i] * np.sum(sum_minus_exp[labels[i] + 1:])
                hess[i] = minus_exp[i] * np.sum(sum_plus_exp[:labels[i]]) + \
                          plus_exp[i] * np.sum(sum_minus_exp[labels[i] + 1:])
            pos += size
        return grad, hess
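
As a rough usage sketch (the synthetic X, y, group_sizes, params and num_boost_round below are placeholders, not from the original answer), the callable is passed to xgb.train through its obj argument, which replaces the built-in objective:

import numpy as np
import xgboost as xgb

# Tiny synthetic ranking set: 2 query groups of 5 documents each (placeholder data).
rng = np.random.default_rng(0)
X = rng.normal(size=(10, 3))
y = rng.integers(0, 3, size=10)          # relevance labels 0..2
group_sizes = [5, 5]                     # rows must already be ordered by group

dtrain = xgb.DMatrix(X, label=y)
dtrain.set_group(group_sizes)            # query boundaries; keep consistent with groups

params = {"eta": 0.1, "max_depth": 3}    # note: no "objective" key, the callable replaces it
bst = xgb.train(params, dtrain,
                num_boost_round=20,
                obj=ExponentialPairwiseLoss(group_sizes))

Because the custom objective only returns the gradient and Hessian, any evaluation metric you want to monitor (for example NDCG) still has to be set via eval_metric or a custom feval.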