Activation functions

# Common activation functions implemented with NumPy
import numpy as np

def sigmoid(x):
    """Sigmoid activation function."""
    return 1 / (1 + np.exp(-x))

def tanh(x):
    """Tanh activation function."""
    return np.tanh(x)
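
# Plain ReLU is not part of the original listing; a minimal sketch is added
# here for reference, since Leaky ReLU and ELU below are variants of it.
def relu(x):
    """ReLU activation function."""
    return np.maximum(0.0, x)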

def leaky_relu(x, alpha=0.01):
    """Leaky ReLU activation function."""
    return np.maximum(alpha * x, x)

def elu(x, alpha=1.0):
    """Exponential Linear Unit (ELU) activation function."""
    # np.where evaluates both branches, so clamp the exponent to avoid
    # overflow warnings for large positive x; the result is unchanged.
    return np.where(x > 0, x, alpha * (np.exp(np.minimum(x, 0)) - 1))

def softmax(x, axis=-1):
    """Softmax activation function, applied along `axis` (works on batched input)."""
    exp_x = np.exp(x - np.max(x, axis=axis, keepdims=True))  # subtract max to avoid overflow
    return exp_x / exp_x.sum(axis=axis, keepdims=True)

def gelu(x):
    """Gaussian Error Linear Unit (GELU), tanh approximation."""
    return 0.5 * x * (1 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))))
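
# Quick usage check (illustrative only; the sample values are assumptions
# chosen for demonstration, not part of the original listing).
if __name__ == "__main__":
    x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
    for name, fn in [("sigmoid", sigmoid), ("tanh", tanh), ("relu", relu),
                     ("leaky_relu", leaky_relu), ("elu", elu),
                     ("softmax", softmax), ("gelu", gelu)]:
        print(f"{name:>10}: {np.round(fn(x), 4)}")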