Creates a graph plot that gives an overview of the network architecture.

Usage

# S3 method for citodnn
plot(x, node_size = 1, scale_edges = FALSE, ...)

# S3 method for citodnnBootstrap
plot(x, node_size = 1, scale_edges = FALSE, which_model = 1, ...)

Arguments

x

a model created by dnn

node_size

size of the nodes in the plot

scale_edges

if TRUE, edge widths are scaled relative to the other edge weights within the same layer

...

additional arguments, currently ignored; no further functionality implemented yet

which_model

which model from the ensemble should be plotted
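For illustration, a minimal sketch of how these arguments combine (the model below is arbitrary and serves only as an example):

if (torch::torch_is_installed()) {
  library(cito)

  # fit a small example network (architecture is arbitrary)
  nn.fit <- dnn(Sepal.Length ~ ., data = datasets::iris)

  # enlarge the nodes and scale edge widths by the weights within each layer
  plot(nn.fit, node_size = 2, scale_edges = TRUE)
}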

Value

A plot, created with 'ggraph' and 'igraph', that represents the neural network.

Examples

# \donttest{
if (torch::torch_is_installed()) {
  library(cito)

  set.seed(222)
  validation_set <- sample(seq_len(nrow(datasets::iris)), 25)

  # Build and train the network
  nn.fit <- dnn(Sepal.Length ~ ., data = datasets::iris[-validation_set, ])

  plot(nn.fit)
}
#> Loss at epoch 1: 2.801770, lr: 0.01000
#> Loss at epoch 2: 0.275557, lr: 0.01000
#> Loss at epoch 3: 0.175860, lr: 0.01000
#> ...
#> Loss at epoch 99: 0.124246, lr: 0.01000
#> Loss at epoch 100: 0.100469, lr: 0.01000

# }
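The citodnnBootstrap method works the same way; which_model picks a single member of the ensemble. A minimal sketch, assuming the ensemble was created via the bootstrap argument of dnn():

if (torch::torch_is_installed()) {
  library(cito)

  # fit an ensemble of 3 bootstrapped networks (bootstrap argument of dnn())
  nn.boot <- dnn(Sepal.Length ~ ., data = datasets::iris, bootstrap = 3)

  # plot the architecture of the second model in the ensemble
  plot(nn.boot, which_model = 2)
}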