
velora.models.activation

Utility methods for activation functions.

ActivationEnum

Bases: Enum

An Enum for PyTorch activation functions.

Useful for getting activation functions dynamically using a string name. Refer to the get() method for more details.
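For example, the same activation module can be retrieved either directly from an enum member or dynamically from a string name. A minimal usage sketch (assuming the module is importable as velora.models.activation, per the source path below):

Python
from velora.models.activation import ActivationEnum

# Direct member access
relu = ActivationEnum.RELU.value

# Dynamic lookup from a configuration string
relu_dynamic = ActivationEnum.get("relu")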

Source code in velora/models/activation.py
Python
class ActivationEnum(Enum):
    """
    An Enum for PyTorch activation functions.

    Useful for getting activation functions dynamically using a `string` name.
    Refer to the `get()` method for more details.
    """

    RELU = nn.ReLU()
    TANH = nn.Tanh()
    ELU = nn.ELU()
    LEAKY_RELU = nn.LeakyReLU()
    PRELU = nn.PReLU()
    SELU = nn.SELU()
    SILU = nn.SiLU()
    SOFTSIGN = nn.Softsign()
    SIGMOID = nn.Sigmoid()
    HARDSIGMOID = nn.Hardsigmoid()
    LECUN_TANH = LeCunTanh()

    @classmethod
    def get(cls, name: ActivationTypeLiteral) -> nn.Module:
        """
        Get the `torch.nn` activation function.

        Parameters:
            name (Literal["relu", "tanh", "elu", "leaky_relu", "prelu", "selu", "silu", "softsign", "sigmoid", "hardsigmoid", "lecun_tanh"]):
                the name of the activation function.

        Returns:
            activation (nn.Module): the PyTorch activation module.
        """
        try:
            return cls[name.upper()].value
        except KeyError:
            raise ValueError(f"Unsupported activation function: {name}")

get(name) classmethod

Get the torch.nn activation function.

Parameters:

    name (Literal["relu", "tanh", "elu", "leaky_relu", "prelu", "selu", "silu", "softsign", "sigmoid", "hardsigmoid", "lecun_tanh"]):
        the name of the activation function. Required.

Returns:

    activation (nn.Module): the PyTorch activation module.
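A short usage sketch (assuming the same import path as above): the returned module can be dropped straight into a model, and an unsupported name raises a ValueError.

Python
import torch.nn as nn

from velora.models.activation import ActivationEnum

# Build a small network with an activation chosen by name
net = nn.Sequential(
    nn.Linear(8, 16),
    ActivationEnum.get("leaky_relu"),
    nn.Linear(16, 1),
)

# Unsupported names raise ValueError
try:
    ActivationEnum.get("swish")
except ValueError as err:
    print(err)  # Unsupported activation function: swish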

Source code in velora/models/activation.py
Python
@classmethod
def get(cls, name: ActivationTypeLiteral) -> nn.Module:
    """
    Get the `torch.nn` activation function.

    Parameters:
        name (Literal["relu", "tanh", "elu", "leaky_relu", "prelu", "selu", "silu", "softsign", "sigmoid", "hardsigmoid", "lecun_tanh"]):
            the name of the activation function.

    Returns:
        activation (nn.Module): the PyTorch activation module.
    """
    try:
        return cls[name.upper()].value
    except KeyError:
        raise ValueError(f"Unsupported activation function: {name}")

LeCunTanh

Bases: nn.Module

Implements LeCun's Tanh activation function:

$$
f(x) = 1.7159 \tanh\left(\frac{2}{3} x\right)
$$

Constants are applied to keep the variance of the output close to 1.

Source code in velora/models/activation.py
Python
class LeCunTanh(nn.Module):
    """
    Implements LeCun's Tanh activation function.
    $$
    f(x) = 1.7159 \\tanh (\\frac{2}{3} x)
    $$
    Constants are applied to keep the variance of the output close to `1`.
    """

    def __init__(self) -> None:
        super().__init__()

        self.tanh = nn.Tanh()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Scale the input by 2/3 and the output by 1.7159, per LeCun's formulation
        return 1.7159 * self.tanh((2.0 / 3.0) * x)
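
A quick usage sketch of LeCunTanh (assuming the same import path as above). Because tanh is bounded by ±1, outputs are bounded by roughly ±1.7159:

Python
import torch

from velora.models.activation import LeCunTanh

act = LeCunTanh()
x = torch.linspace(-5.0, 5.0, steps=5)
y = act(x)  # values lie within (-1.7159, 1.7159)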