add Sigmoid activation

Pierre Chapuis 2023-08-07 16:25:58 +02:00
parent f49bb4f5fd
commit bd49304fc8
2 changed files with 10 additions and 1 deletion


@@ -1,4 +1,4 @@
-from refiners.fluxion.layers.activations import GLU, SiLU, ReLU, ApproximateGeLU, GeLU
+from refiners.fluxion.layers.activations import GLU, SiLU, ReLU, ApproximateGeLU, GeLU, Sigmoid
 from refiners.fluxion.layers.norm import LayerNorm, GroupNorm, LayerNorm2d
 from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
 from refiners.fluxion.layers.basics import (
@@ -44,6 +44,7 @@ __all__ = [
     "SiLU",
     "ReLU",
     "ApproximateGeLU",
+    "Sigmoid",
     "Attention",
     "SelfAttention",
     "SelfAttention2d",


@@ -46,6 +46,14 @@ class ApproximateGeLU(Activation):
         return x * sigmoid(1.702 * x)
 
 
+class Sigmoid(Activation):
+    def __init__(self) -> None:
+        super().__init__()
+
+    def forward(self, x: Tensor) -> Tensor:
+        return x.sigmoid()
+
+
 class GLU(Activation):
     """
     Gated Linear Unit activation layer.
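
Not part of the commit itself: a minimal usage sketch of the new layer, assuming the first hunk edits the refiners.fluxion.layers package __init__ (the file name is not shown in this view) so that Sigmoid is importable from refiners.fluxion.layers, and that Activation layers are callable like standard torch modules. The layer has no parameters; it simply applies the element-wise sigmoid, so its output should match torch.sigmoid.

import torch
from refiners.fluxion.layers import Sigmoid

# Parameter-free activation layer: calling it applies x.sigmoid()
# element-wise, so it can be dropped into any composition of layers.
activation = Sigmoid()
x = torch.randn(2, 4)
assert torch.equal(activation(x), torch.sigmoid(x))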