Mirror of https://github.com/finegrain-ai/refiners.git (synced 2024-11-21 21:58:47 +00:00)
(doc/fluxion) export Activation and ScaledDotProductAttention

Commit: 9fb9df5f91
Parent: c7fd1496b5
src/refiners/fluxion/layers/__init__.py

@@ -1,5 +1,18 @@
-from refiners.fluxion.layers.activations import GLU, ApproximateGeLU, GeLU, ReLU, Sigmoid, SiLU
-from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
+from refiners.fluxion.layers.activations import (
+    GLU,
+    Activation,
+    ApproximateGeLU,
+    GeLU,
+    ReLU,
+    Sigmoid,
+    SiLU,
+)
+from refiners.fluxion.layers.attentions import (
+    Attention,
+    ScaledDotProductAttention,
+    SelfAttention,
+    SelfAttention2d,
+)
 from refiners.fluxion.layers.basics import (
     Cos,
     Flatten,
@@ -49,6 +62,7 @@ __all__ = [
     "GroupNorm",
     "LayerNorm2d",
     "InstanceNorm2d",
+    "Activation",
     "GeLU",
     "GLU",
     "SiLU",
@@ -56,6 +70,7 @@ __all__ = [
     "ApproximateGeLU",
     "Sigmoid",
     "Attention",
+    "ScaledDotProductAttention",
     "SelfAttention",
     "SelfAttention2d",
     "Identity",
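After this change, Activation and ScaledDotProductAttention can be imported directly from refiners.fluxion.layers rather than from the activations and attentions submodules. A minimal sketch of what that enables, assuming Activation is the shared base class of the activation layers and that ScaledDotProductAttention can be instantiated with its defaults and called on (query, key, value) tensors; everything below the imports is illustrative:

```python
import torch

# These two names are what this commit newly re-exports from the
# top-level layers package; SiLU was already exported.
from refiners.fluxion.layers import Activation, ScaledDotProductAttention, SiLU

# Exporting the Activation base class lets downstream code type-check
# any activation layer generically (assumption: SiLU subclasses it).
act = SiLU()
assert isinstance(act, Activation)

# ScaledDotProductAttention computes softmax(QK^T / sqrt(d)) V.
# Shapes here are assumed to be (batch, sequence, embedding_dim).
sdpa = ScaledDotProductAttention()
q = k = v = torch.randn(1, 8, 64)
out = sdpa(q, k, v)
print(out.shape)  # expected: torch.Size([1, 8, 64])
```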