add Sigmoid activation
commit bd49304fc8 (parent f49bb4f5fd)
src/refiners/fluxion/layers/__init__.py
@@ -1,4 +1,4 @@
-from refiners.fluxion.layers.activations import GLU, SiLU, ReLU, ApproximateGeLU, GeLU
+from refiners.fluxion.layers.activations import GLU, SiLU, ReLU, ApproximateGeLU, GeLU, Sigmoid
 from refiners.fluxion.layers.norm import LayerNorm, GroupNorm, LayerNorm2d
 from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
 from refiners.fluxion.layers.basics import (
@@ -44,6 +44,7 @@ __all__ = [
     "SiLU",
     "ReLU",
     "ApproximateGeLU",
+    "Sigmoid",
     "Attention",
     "SelfAttention",
     "SelfAttention2d",
src/refiners/fluxion/layers/activations.py
@@ -46,6 +46,14 @@ class ApproximateGeLU(Activation):
         return x * sigmoid(1.702 * x)
 
 
+class Sigmoid(Activation):
+    def __init__(self) -> None:
+        super().__init__()
+
+    def forward(self, x: Tensor) -> Tensor:
+        return x.sigmoid()
+
+
 class GLU(Activation):
     """
     Gated Linear Unit activation layer.
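
For reference, a minimal smoke test of the new layer (hypothetical usage, not part of this commit; assumes torch and refiners are installed). Sigmoid squashes each element into (0, 1) via σ(x) = 1 / (1 + exp(-x)):

import torch
from refiners.fluxion.layers import Sigmoid

# Hypothetical usage, not part of the commit: apply the new
# activation to a random tensor and sanity-check the output.
act = Sigmoid()
x = torch.randn(2, 8)
y = act(x)
assert y.shape == x.shape         # elementwise op preserves shape
assert ((y > 0) & (y < 1)).all()  # every value lands in (0, 1)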