From bd49304fc8f03e1592b041a3adf0d7ac3c859f80 Mon Sep 17 00:00:00 2001
From: Pierre Chapuis
Date: Mon, 7 Aug 2023 16:25:58 +0200
Subject: [PATCH] add Sigmoid activation

---
 src/refiners/fluxion/layers/__init__.py    | 3 ++-
 src/refiners/fluxion/layers/activations.py | 8 ++++++++
 2 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/src/refiners/fluxion/layers/__init__.py b/src/refiners/fluxion/layers/__init__.py
index 3cefd15..170bac6 100644
--- a/src/refiners/fluxion/layers/__init__.py
+++ b/src/refiners/fluxion/layers/__init__.py
@@ -1,4 +1,4 @@
-from refiners.fluxion.layers.activations import GLU, SiLU, ReLU, ApproximateGeLU, GeLU
+from refiners.fluxion.layers.activations import GLU, SiLU, ReLU, ApproximateGeLU, GeLU, Sigmoid
 from refiners.fluxion.layers.norm import LayerNorm, GroupNorm, LayerNorm2d
 from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
 from refiners.fluxion.layers.basics import (
@@ -44,6 +44,7 @@ __all__ = [
     "SiLU",
     "ReLU",
     "ApproximateGeLU",
+    "Sigmoid",
     "Attention",
     "SelfAttention",
     "SelfAttention2d",
diff --git a/src/refiners/fluxion/layers/activations.py b/src/refiners/fluxion/layers/activations.py
index b023199..eca9afd 100644
--- a/src/refiners/fluxion/layers/activations.py
+++ b/src/refiners/fluxion/layers/activations.py
@@ -46,6 +46,14 @@ class ApproximateGeLU(Activation):
         return x * sigmoid(1.702 * x)
 
 
+class Sigmoid(Activation):
+    def __init__(self) -> None:
+        super().__init__()
+
+    def forward(self, x: Tensor) -> Tensor:
+        return x.sigmoid()
+
+
 class GLU(Activation):
     """
     Gated Linear Unit activation layer.
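
A minimal usage sketch for the layer added by this patch, assuming fluxion's Activation base ultimately derives from torch.nn.Module so instances are callable on tensors:

    import torch

    from refiners.fluxion.layers import Sigmoid

    # The new layer applies the element-wise logistic sigmoid,
    # so its output should match torch.sigmoid on the same input.
    layer = Sigmoid()
    x = torch.randn(2, 4)
    assert torch.allclose(layer(x), torch.sigmoid(x))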