Functional Activations

relu

def relu(x: nabla.core.tensor.Tensor) -> nabla.core.tensor.Tensor:

Rectified Linear Unit activation function.
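
As a point of reference, the standard ReLU formula can be written in plain NumPy. This is only a sketch of the usual element-wise definition, not nabla's implementation:

```python
import numpy as np

def relu_reference(x: np.ndarray) -> np.ndarray:
    # ReLU keeps positive entries and zeroes out the rest: max(x, 0).
    return np.maximum(x, 0.0)

relu_reference(np.array([-2.0, 0.0, 3.0]))  # -> [0., 0., 3.]
```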

leaky_relu

def leaky_relu(x: nabla.core.tensor.Tensor, negative_slope: float = 0.01) -> nabla.core.tensor.Tensor:

Leaky Rectified Linear Unit activation function.
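
A NumPy sketch of the conventional Leaky ReLU rule, using the negative_slope default from the signature above (a reference formula, not nabla code):

```python
import numpy as np

def leaky_relu_reference(x: np.ndarray, negative_slope: float = 0.01) -> np.ndarray:
    # Positive entries pass through; negative entries are scaled by negative_slope.
    return np.where(x > 0, x, negative_slope * x)

leaky_relu_reference(np.array([-2.0, 3.0]))  # -> [-0.02, 3.]
```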

sigmoid

def sigmoid(x: nabla.core.tensor.Tensor) -> nabla.core.tensor.Tensor:

Sigmoid (logistic) activation function.
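
The logistic sigmoid is standard; a minimal NumPy reference of the formula, for comparison only:

```python
import numpy as np

def sigmoid_reference(x: np.ndarray) -> np.ndarray:
    # Logistic function: 1 / (1 + exp(-x)), squashing inputs into (0, 1).
    return 1.0 / (1.0 + np.exp(-x))

sigmoid_reference(np.array([0.0]))  # -> [0.5]
```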

tanh

def tanh(x: nabla.core.tensor.Tensor) -> nabla.core.tensor.Tensor:

Hyperbolic tangent activation function.
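
tanh is likewise element-wise; a NumPy reference for comparison:

```python
import numpy as np

def tanh_reference(x: np.ndarray) -> np.ndarray:
    # Hyperbolic tangent squashes inputs into (-1, 1); tanh(x) = 2*sigmoid(2x) - 1.
    return np.tanh(x)

tanh_reference(np.array([0.0, 100.0]))  # -> [0., 1.]
```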

softmax

def softmax(x: nabla.core.tensor.Tensor, axis: int = -1) -> nabla.core.tensor.Tensor:

Softmax activation function, applied along axis.
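
Softmax normalizes values along axis into a probability distribution. Below is a NumPy reference of the standard numerically stabilized formula; it is a sketch of the math, not necessarily how nabla computes it:

```python
import numpy as np

def softmax_reference(x: np.ndarray, axis: int = -1) -> np.ndarray:
    # Subtracting the per-axis max keeps exp() from overflowing without changing the result.
    shifted = x - np.max(x, axis=axis, keepdims=True)
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=axis, keepdims=True)

softmax_reference(np.array([1.0, 2.0, 3.0]))  # entries sum to 1.0
```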

log_softmax

def log_softmax(x: nabla.core.tensor.Tensor, axis: int = -1) -> nabla.core.tensor.Tensor:

Logarithm of the softmax activation function, applied along axis.
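
log_softmax is the logarithm of softmax, usually computed directly in log-space for numerical stability. A NumPy reference sketch of that standard identity:

```python
import numpy as np

def log_softmax_reference(x: np.ndarray, axis: int = -1) -> np.ndarray:
    # log_softmax(x) = x - logsumexp(x); the max shift keeps exp() well-behaved.
    shifted = x - np.max(x, axis=axis, keepdims=True)
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

np.exp(log_softmax_reference(np.array([1.0, 2.0, 3.0])))  # matches softmax
```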

gelu

def gelu(x: nabla.core.tensor.Tensor) -> nabla.core.tensor.Tensor:

Gaussian Error Linear Unit activation function.
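
This page does not say whether nabla uses the exact (erf-based) GELU or the common tanh approximation. The sketch below shows the exact form for reference only, using NumPy and SciPy rather than nabla:

```python
import numpy as np
from scipy.special import erf

def gelu_reference(x: np.ndarray) -> np.ndarray:
    # Exact GELU: x * Phi(x), where Phi is the standard normal CDF.
    return 0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))

gelu_reference(np.array([-1.0, 0.0, 1.0]))  # -> approx [-0.1587, 0., 0.8413]
```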