# R code for boosting tune
library(gbm)
library(dplyr)

# grid of candidate hyperparameter combinations (3 shrinkage x 3 depths = 9)
hyper_grid <- expand.grid(
  shrinkage = c(.01, .1, .3),
  interaction.depth = c(1, 3, 5),
  optimal_trees = 0,  # placeholder, filled in by the tuning loop
  min_RMSE = 0        # placeholder, filled in by the tuning loop
)
nrow(hyper_grid)
# Loop through each hyperparameter combination, fitting up to 8000 trees each.
# gbm() uses the first train.fraction * nrow(data) rows as its training set,
# so randomize the row order first.
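# Sketch, not in the original script: seeding the RNG makes the shuffle (and
# therefore the tuning results) reproducible; 123 is an arbitrary choice.
set.seed(123)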
random_index <- sample(1:nrow(train_CO2), nrow(train_CO2))
random_CO2_train <- train_CO2[random_index, ]
for (i in 1:nrow(hyper_grid)) {
  # train model
  gbm.tune <- gbm(
    formula = CO2 ~ MemberState + MfrHarmonised + ApprovalYr + Year +
      FuelType + Mass + EngineSize + Power,
    distribution = "gaussian",
    data = random_CO2_train,
    n.trees = 8000,
    interaction.depth = hyper_grid$interaction.depth[i],
    shrinkage = hyper_grid$shrinkage[i],
    train.fraction = .75,
    n.cores = NULL,  # will use all cores by default
    verbose = FALSE
  )
  # record the iteration with the lowest validation error and its RMSE
  hyper_grid$optimal_trees[i] <- which.min(gbm.tune$valid.error)
  hyper_grid$min_RMSE[i] <- sqrt(min(gbm.tune$valid.error))
}
# show the 10 best combinations, sorted by validation RMSE
hyper_grid %>% dplyr::arrange(min_RMSE) %>% head(10)
# Best parameters: shrinkage = 0.1, interaction.depth = 3, optimal trees = 7192
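# Follow-up sketch, not part of the original script: refit with the best
# parameters found above and score a hold-out set. `test_CO2` is an assumed
# data frame with the same columns as `train_CO2`.
gbm.final <- gbm(
  formula = CO2 ~ MemberState + MfrHarmonised + ApprovalYr + Year +
    FuelType + Mass + EngineSize + Power,
  distribution = "gaussian",
  data = random_CO2_train,
  n.trees = 7192,
  interaction.depth = 3,
  shrinkage = 0.1,
  n.cores = NULL,
  verbose = FALSE
)
pred <- predict(gbm.final, newdata = test_CO2, n.trees = 7192)
sqrt(mean((test_CO2$CO2 - pred)^2))  # hold-out RMSE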