Helps create custom learning rate schedulers for dnn.
Usage
config_lr_scheduler(
type = c("lambda", "multiplicative", "reduce_on_plateau", "one_cycle", "step"),
verbose = FALSE,
...
)
Arguments
- type
String defining which type of scheduler should be used. See Details.
- verbose
If TRUE, additional information about the scheduler is printed to the console.
- ...
Additional arguments to be passed to the scheduler. See Details.
Value
An object of class cito_lr_scheduler that can be passed to dnn.
Details
Different learning rate schedulers require different arguments; the corresponding torch functions document which arguments can be set for each type:
lambda: lr_lambda
multiplicative: lr_multiplicative
reduce_on_plateau: lr_reduce_on_plateau
one_cycle: lr_one_cycle
step: lr_step
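For example, a "reduce_on_plateau" scheduler accepts the arguments of torch::lr_reduce_on_plateau() via the ... argument. A minimal sketch (not run), assuming factor and patience are forwarded unchanged to the torch scheduler:
# reduce the learning rate by a factor of 0.5 once the loss has not
# improved for 10 epochs (argument names follow torch::lr_reduce_on_plateau())
plateau_scheduler <- config_lr_scheduler(type = "reduce_on_plateau",
                                         factor = 0.5,
                                         patience = 10)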
Examples
# \donttest{
if(torch::torch_is_installed()){
library(cito)
# create learning rate scheduler object
scheduler <- config_lr_scheduler(type = "step",
                                 step_size = 30,
                                 gamma = 0.15,
                                 verbose = TRUE)
# Build and train the network
nn.fit <- dnn(Sepal.Length ~ ., data = datasets::iris, lr_scheduler = scheduler)
}
#> Learning rate Scheduler step
#> step_size: [30]
#> gamma: [0.15]
#> last_epoch: [-1]
#> Loss at epoch 1: 4.153176, lr: 0.01000
#> Loss at epoch 2: 0.207391, lr: 0.01000
#> Loss at epoch 3: 0.145729, lr: 0.01000
#> Loss at epoch 4: 0.175220, lr: 0.01000
#> Loss at epoch 5: 0.194020, lr: 0.01000
#> Loss at epoch 6: 0.141185, lr: 0.01000
#> Loss at epoch 7: 0.215565, lr: 0.01000
#> Loss at epoch 8: 0.152649, lr: 0.01000
#> Loss at epoch 9: 0.139623, lr: 0.01000
#> Loss at epoch 10: 0.130910, lr: 0.01000
#> Loss at epoch 11: 0.140172, lr: 0.01000
#> Loss at epoch 12: 0.152862, lr: 0.01000
#> Loss at epoch 13: 0.123373, lr: 0.01000
#> Loss at epoch 14: 0.125456, lr: 0.01000
#> Loss at epoch 15: 0.111656, lr: 0.01000
#> Loss at epoch 16: 0.145277, lr: 0.01000
#> Loss at epoch 17: 0.119438, lr: 0.01000
#> Loss at epoch 18: 0.150167, lr: 0.01000
#> Loss at epoch 19: 0.136253, lr: 0.01000
#> Loss at epoch 20: 0.255256, lr: 0.01000
#> Loss at epoch 21: 0.209241, lr: 0.01000
#> Loss at epoch 22: 0.126261, lr: 0.01000
#> Loss at epoch 23: 0.166959, lr: 0.01000
#> Loss at epoch 24: 0.131057, lr: 0.01000
#> Loss at epoch 25: 0.189918, lr: 0.01000
#> Loss at epoch 26: 0.118617, lr: 0.01000
#> Loss at epoch 27: 0.202249, lr: 0.01000
#> Loss at epoch 28: 0.134231, lr: 0.01000
#> Loss at epoch 29: 0.249217, lr: 0.01000
#> Loss at epoch 30: 0.123002, lr: 0.00150
#> Loss at epoch 31: 0.127133, lr: 0.00150
#> Loss at epoch 32: 0.112690, lr: 0.00150
#> Loss at epoch 33: 0.111249, lr: 0.00150
#> Loss at epoch 34: 0.109593, lr: 0.00150
#> Loss at epoch 35: 0.112791, lr: 0.00150
#> Loss at epoch 36: 0.110514, lr: 0.00150
#> Loss at epoch 37: 0.110359, lr: 0.00150
#> Loss at epoch 38: 0.110210, lr: 0.00150
#> Loss at epoch 39: 0.110014, lr: 0.00150
#> Loss at epoch 40: 0.109791, lr: 0.00150
#> Loss at epoch 41: 0.110397, lr: 0.00150
#> Loss at epoch 42: 0.108985, lr: 0.00150
#> Loss at epoch 43: 0.110937, lr: 0.00150
#> Loss at epoch 44: 0.109395, lr: 0.00150
#> Loss at epoch 45: 0.110956, lr: 0.00150
#> Loss at epoch 46: 0.109435, lr: 0.00150
#> Loss at epoch 47: 0.110090, lr: 0.00150
#> Loss at epoch 48: 0.111246, lr: 0.00150
#> Loss at epoch 49: 0.109996, lr: 0.00150
#> Loss at epoch 50: 0.109563, lr: 0.00150
#> Loss at epoch 51: 0.108836, lr: 0.00150
#> Loss at epoch 52: 0.109201, lr: 0.00150
#> Loss at epoch 53: 0.107499, lr: 0.00150
#> Loss at epoch 54: 0.109762, lr: 0.00150
#> Loss at epoch 55: 0.109464, lr: 0.00150
#> Loss at epoch 56: 0.108926, lr: 0.00150
#> Loss at epoch 57: 0.107944, lr: 0.00150
#> Loss at epoch 58: 0.107945, lr: 0.00150
#> Loss at epoch 59: 0.107832, lr: 0.00150
#> Loss at epoch 60: 0.111433, lr: 0.00022
#> Loss at epoch 61: 0.107536, lr: 0.00022
#> Loss at epoch 62: 0.107177, lr: 0.00022
#> Loss at epoch 63: 0.107213, lr: 0.00022
#> Loss at epoch 64: 0.107203, lr: 0.00022
#> Loss at epoch 65: 0.107157, lr: 0.00022
#> Loss at epoch 66: 0.107134, lr: 0.00022
#> Loss at epoch 67: 0.107073, lr: 0.00022
#> Loss at epoch 68: 0.107218, lr: 0.00022
#> Loss at epoch 69: 0.107248, lr: 0.00022
#> Loss at epoch 70: 0.107078, lr: 0.00022
#> Loss at epoch 71: 0.107204, lr: 0.00022
#> Loss at epoch 72: 0.107151, lr: 0.00022
#> Loss at epoch 73: 0.107402, lr: 0.00022
#> Loss at epoch 74: 0.106986, lr: 0.00022
#> Loss at epoch 75: 0.107032, lr: 0.00022
#> Loss at epoch 76: 0.107186, lr: 0.00022
#> Loss at epoch 77: 0.107083, lr: 0.00022
#> Loss at epoch 78: 0.106915, lr: 0.00022
#> Loss at epoch 79: 0.106884, lr: 0.00022
#> Loss at epoch 80: 0.107088, lr: 0.00022
#> Loss at epoch 81: 0.106960, lr: 0.00022
#> Loss at epoch 82: 0.106949, lr: 0.00022
#> Loss at epoch 83: 0.106989, lr: 0.00022
#> Loss at epoch 84: 0.107166, lr: 0.00022
#> Loss at epoch 85: 0.106945, lr: 0.00022
#> Loss at epoch 86: 0.107006, lr: 0.00022
#> Loss at epoch 87: 0.106942, lr: 0.00022
#> Loss at epoch 88: 0.106853, lr: 0.00022
#> Loss at epoch 89: 0.106735, lr: 0.00022
#> Loss at epoch 90: 0.106804, lr: 0.00003
#> Loss at epoch 91: 0.106614, lr: 0.00003
#> Loss at epoch 92: 0.106617, lr: 0.00003
#> Loss at epoch 93: 0.106641, lr: 0.00003
#> Loss at epoch 94: 0.106605, lr: 0.00003
#> Loss at epoch 95: 0.106619, lr: 0.00003
#> Loss at epoch 96: 0.106601, lr: 0.00003
#> Loss at epoch 97: 0.106650, lr: 0.00003
#> Loss at epoch 98: 0.106602, lr: 0.00003
#> Loss at epoch 99: 0.106620, lr: 0.00003
#> Loss at epoch 100: 0.106610, lr: 0.00003
# }
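# A one_cycle scheduler can be configured analogously. Sketch only (not run):
# argument names follow torch::lr_one_cycle(); setting total_steps = 100
# assumes the network is trained for 100 epochs with one scheduler step per epoch.
one_cycle_scheduler <- config_lr_scheduler(type = "one_cycle",
                                           max_lr = 0.05,
                                           total_steps = 100)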