(doc/fluxion) export Activation and ScaledDotProductAttention

Laurent 2024-02-01 22:09:28 +00:00 committed by Laureηt
parent c7fd1496b5
commit 9fb9df5f91


@@ -1,5 +1,18 @@
-from refiners.fluxion.layers.activations import GLU, ApproximateGeLU, GeLU, ReLU, Sigmoid, SiLU
-from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
+from refiners.fluxion.layers.activations import (
+    GLU,
+    Activation,
+    ApproximateGeLU,
+    GeLU,
+    ReLU,
+    Sigmoid,
+    SiLU,
+)
+from refiners.fluxion.layers.attentions import (
+    Attention,
+    ScaledDotProductAttention,
+    SelfAttention,
+    SelfAttention2d,
+)
 from refiners.fluxion.layers.basics import (
     Cos,
     Flatten,
@@ -49,6 +62,7 @@ __all__ = [
     "GroupNorm",
     "LayerNorm2d",
     "InstanceNorm2d",
+    "Activation",
     "GeLU",
     "GLU",
     "SiLU",
@@ -56,6 +70,7 @@ __all__ = [
     "ApproximateGeLU",
     "Sigmoid",
     "Attention",
+    "ScaledDotProductAttention",
     "SelfAttention",
     "SelfAttention2d",
     "Identity",