Activation Modules#

ReLU#

class ReLU():

Applies the Rectified Linear Unit (ReLU) activation element-wise: ReLU(x) = max(0, x).

Methods#

forward#

def forward(self, x):
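
Forward pass: applies ReLU(x) = max(0, x) element-wise to the input x and returns an array of the same shape. Below is a minimal NumPy sketch of the computation; the relu_forward helper is illustrative only, not part of the library, and the real module may additionally cache values for the backward pass.

import numpy as np

def relu_forward(x):
    # Element-wise max(0, x): negative entries become 0, non-negative entries pass through.
    return np.maximum(0.0, x)

x = np.array([-2.0, -0.5, 0.0, 1.5])
print(relu_forward(x))  # [0., 0., 0., 1.5]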

LeakyReLU#

class LeakyReLU(negative_slope=0.01):

Applies the Leaky ReLU activation element-wise: returns x for x >= 0 and negative_slope * x for x < 0. The negative_slope parameter (default 0.01) sets the slope used for negative inputs.

Methods#

forward#

def forward(self, x):
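
Forward pass: applies Leaky ReLU element-wise, keeping non-negative entries and scaling negative entries by negative_slope. A minimal NumPy sketch of the computation follows; leaky_relu_forward is an illustrative helper, not the library's API.

import numpy as np

def leaky_relu_forward(x, negative_slope=0.01):
    # Keep non-negative entries; scale negative entries by negative_slope.
    return np.where(x >= 0, x, negative_slope * x)

x = np.array([-2.0, -0.5, 0.0, 1.5])
print(leaky_relu_forward(x))  # [-0.02, -0.005, 0., 1.5]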

Sigmoid#

class Sigmoid():

Applies the logistic sigmoid activation element-wise: Sigmoid(x) = 1 / (1 + exp(-x)), mapping inputs into the range (0, 1).

Methods#

forward#

def forward(self, x):
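
Forward pass: applies the logistic sigmoid 1 / (1 + exp(-x)) element-wise. A minimal NumPy sketch of the computation follows; sigmoid_forward is an illustrative helper, not the library's API.

import numpy as np

def sigmoid_forward(x):
    # 1 / (1 + exp(-x)): large negative inputs approach 0, large positive inputs approach 1.
    return 1.0 / (1.0 + np.exp(-x))

x = np.array([-2.0, 0.0, 2.0])
print(sigmoid_forward(x))  # approximately [0.119, 0.5, 0.881]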

Tanh#

class Tanh():

Applies the hyperbolic tangent activation element-wise: Tanh(x) = (exp(x) - exp(-x)) / (exp(x) + exp(-x)), mapping inputs into the range (-1, 1).

Methods#

forward#

def forward(self, x):
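
Forward pass: applies tanh element-wise, mapping inputs into (-1, 1). A minimal NumPy sketch of the computation follows; tanh_forward is an illustrative helper, not the library's API.

import numpy as np

def tanh_forward(x):
    # tanh(x) = (exp(x) - exp(-x)) / (exp(x) + exp(-x)); zero-centered and bounded in (-1, 1).
    return np.tanh(x)

x = np.array([-2.0, 0.0, 2.0])
print(tanh_forward(x))  # approximately [-0.964, 0., 0.964]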