"""Activation Functions ModuleA comprehensive collection of activation functions and their derivatives for neural networks."""importnumpyasnp
class ActivationFunctions:
    """
    Comprehensive collection of activation functions and their derivatives.

    Provides implementations of popular activation functions used in neural
    networks, including their derivatives for backpropagation. All functions
    are numerically stable and handle edge cases appropriately.
    """
    @staticmethod
    def sigmoid(x):
        """
        Compute sigmoid activation function.

        Applies the logistic sigmoid function that maps input to the (0, 1)
        range. Includes numerical clipping to prevent overflow in the
        exponential computation.

        Args:
            x (NDArray[np.float64]): Input array of any shape.

        Returns:
            NDArray[np.float64]: Sigmoid-activated values in range (0, 1).

        Example:
            >>> activated = ActivationFunctions.sigmoid(z)
            >>> # Values are now between 0 and 1
        """
        return 1 / (1 + np.exp(-np.clip(x, -500, 500)))
    @staticmethod
    def softmax(z):
        """
        Compute softmax activation function.

        Converts raw scores (logits) into a probability distribution over
        classes. The row-wise maximum is subtracted before exponentiation
        for numerical stability.

        Args:
            z (NDArray[np.float64]): Input array of shape (N, C), where N is
                the batch size and C is the number of classes.

        Returns:
            NDArray[np.float64]: Softmax probabilities; each row sums to 1.
        """
        # z has shape (N, C); shift by the row-wise max for numerical stability
        z_shift = z - np.max(z, axis=1, keepdims=True)
        exp_z = np.exp(z_shift)
        return exp_z / np.sum(exp_z, axis=1, keepdims=True)
    @staticmethod
    def relu(x):
        """
        Compute ReLU (Rectified Linear Unit) activation.

        Applies the rectified linear activation function that outputs the
        input for positive values and zero for negative values. Most popular
        activation for hidden layers in modern neural networks.

        Args:
            x (NDArray[np.float64]): Input array of any shape.

        Returns:
            NDArray[np.float64]: ReLU-activated values (non-negative).

        Example:
            >>> activated = ActivationFunctions.relu(z)
            >>> # Negative values become 0, positive values unchanged
        """
        return np.maximum(0, x)
    @staticmethod
    def leaky_relu(x, negative_slope=0.01):
        """
        Compute Leaky ReLU activation function.

        Variant of ReLU that allows small negative values to flow through,
        helping to mitigate the "dying ReLU" problem where neurons can become
        permanently inactive.

        Args:
            x (NDArray[np.float64]): Input array of any shape.
            negative_slope (float, optional): Slope for negative values.
                Defaults to 0.01.

        Returns:
            NDArray[np.float64]: Leaky ReLU-activated values.

        Example:
            >>> activated = ActivationFunctions.leaky_relu(z, negative_slope=0.01)
            >>> # Positive values unchanged, negative values scaled by 0.01
        """
        return np.where(x > 0, x, negative_slope * x)
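

# --- Usage sketch (illustrative only, not part of the module's public API) ---
# A minimal, hedged example showing how these static methods might be applied
# to a batch of scores. The random input and its (4, 3) shape are assumptions
# made purely for demonstration.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    z = rng.standard_normal((4, 3))  # hypothetical batch: 4 samples, 3 classes

    hidden = ActivationFunctions.relu(z)       # negatives clamped to 0
    leaky = ActivationFunctions.leaky_relu(z)  # negatives scaled by 0.01
    gate = ActivationFunctions.sigmoid(z)      # values squashed into (0, 1)
    probs = ActivationFunctions.softmax(z)     # per-row probability distribution

    # Each row of the softmax output sums to 1.
    assert np.allclose(probs.sum(axis=1), 1.0)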