Slide 18
Slide 18 text
import statsmodels.formula.api as smf

# Build a prediction interval by fitting the same linear quantile model
# at an upper and a lower conditional quantile (here 97.5% / 2.5%).
q = .975
mod = smf.quantreg('y ~ x', data)
upper_mod, lower_mod = (mod.fit(q=p) for p in (q, 1 - q))
import lightgbm as lgb

# Gradient-boosting version of the same idea: train two LGBM regressors
# with the 'quantile' objective, one per interval bound (alpha = quantile).
q = .975
clf_upper, clf_lower = (
    lgb.LGBMRegressor(objective='quantile', alpha=a).fit(x, y)
    for a in (q, 1 - q)
)
# Define a pinball (quantile) loss like the one below and pass it as the loss function
class QuantileLoss(nn.Module):
    """Pinball (quantile) loss for multi-quantile regression.

    For each requested quantile q, the per-sample loss is
    ``max((q - 1) * e, q * e)`` with ``e = target - pred`` — the standard
    pinball loss. Losses are summed across quantiles, then averaged over
    the batch.

    Note: the extracted source had all indentation stripped (invalid
    Python); structure restored here, logic unchanged.
    """

    def __init__(self, quantiles):
        """quantiles: iterable of floats in (0, 1), one per prediction column."""
        super().__init__()
        self.quantiles = quantiles

    def forward(self, preds, target):
        """Compute the mean summed pinball loss.

        preds:  tensor of shape (batch, n_quantiles) — column i predicts
                quantile ``self.quantiles[i]``.
        target: tensor of shape (batch,); must not require grad.
        """
        assert not target.requires_grad
        assert preds.size(0) == target.size(0)
        losses = []
        for i, q in enumerate(self.quantiles):
            errors = target - preds[:, i]
            # Pinball loss: under-prediction weighted by q,
            # over-prediction weighted by (1 - q).
            losses.append(
                torch.max(
                    (q - 1) * errors,
                    q * errors
                ).unsqueeze(1))
        # Sum over quantiles, mean over the batch.
        loss = torch.mean(
            torch.sum(torch.cat(losses, dim=1), dim=1))
        return loss
Linear model (Python: statsmodels)
Gradient boosting decision trees (Python: LightGBM)
Deep Learning (Python: PyTorch)
Linear model (R: quantreg — R has plenty of options)
# Quantile regression in R: fit y ~ x at several quantiles at once via tau.
library(quantreg)
# NOTE(review): seq(0, 1, 0.25) includes the extremes tau = 0 and tau = 1,
# which rq() treats as a special (full-range) case — confirm the endpoints
# are intended rather than e.g. seq(0.25, 0.75, 0.25).
rq(y~x, data=data, tau=seq(0,1,0.25))