All modules for which code is available
- mygrad._io
- mygrad._utils.lock_management
- mygrad.computational_graph
- mygrad.indexing_routines.funcs
- mygrad.linalg.funcs
- mygrad.math.arithmetic.funcs
- mygrad.math.misc.funcs
- mygrad.math.sequential.funcs
- mygrad.math.trigonometric.funcs
- mygrad.nnet.activations.elu
- mygrad.nnet.activations.glu
- mygrad.nnet.activations.hard_tanh
- mygrad.nnet.activations.leaky_relu
- mygrad.nnet.activations.relu
- mygrad.nnet.activations.selu
- mygrad.nnet.activations.sigmoid
- mygrad.nnet.activations.soft_sign
- mygrad.nnet.activations.softmax
- mygrad.nnet.initializers.dirac
- mygrad.nnet.initializers.glorot_normal
- mygrad.nnet.initializers.glorot_uniform
- mygrad.nnet.initializers.he_normal
- mygrad.nnet.initializers.he_uniform
- mygrad.nnet.initializers.normal
- mygrad.nnet.initializers.uniform
- mygrad.nnet.layers.batchnorm
- mygrad.nnet.layers.conv
- mygrad.nnet.layers.gru
- mygrad.nnet.layers.pooling
- mygrad.nnet.layers.utils
- mygrad.nnet.losses.focal_loss
- mygrad.nnet.losses.margin_ranking_loss
- mygrad.nnet.losses.multiclass_hinge
- mygrad.nnet.losses.negative_log_likelihood
- mygrad.nnet.losses.softmax_crossentropy
- mygrad.operation_base
- mygrad.random.funcs
- mygrad.tensor_base
- mygrad.tensor_creation.funcs
- mygrad.tensor_manip.array_shape.funcs
- mygrad.tensor_manip.tensor_joining.funcs
- mygrad.tensor_manip.tiling.funcs
- mygrad.tensor_manip.transpose_like.funcs
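
The modules listed above back MyGrad's public API; most of the functions they define are re-exported from the top-level `mygrad` namespace and from subpackages such as `mygrad.nnet`. As a minimal sketch of how a few of them fit together (assuming a standard MyGrad 2.x installation; the import paths below mirror the list, but check your installed version):

```python
import mygrad as mg
from mygrad.nnet.activations import relu  # defined in mygrad.nnet.activations.relu

# Creation routines such as tensor() live in mygrad.tensor_creation.funcs
x = mg.tensor([-1.0, 0.0, 2.0])

# relu() builds a node in the computational graph (see mygrad.computational_graph)
loss = relu(x).sum()  # reduce to a scalar before backpropagating

loss.backward()  # backpropagate through the graph
print(x.grad)    # gradient of loss w.r.t. x; expected: [0., 0., 1.]
```

Calling `backward()` on the scalar output triggers reverse-mode autodiff through every operation recorded in the graph, populating the `.grad` attribute of each participating tensor.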