From 9fb9df5f9178d12b54600b298e4059da0b286f28 Mon Sep 17 00:00:00 2001
From: Laurent
Date: Thu, 1 Feb 2024 22:09:28 +0000
Subject: [PATCH] (doc/fluxion) export `Activation` and
 `ScaledDotProductAttention`

---
 src/refiners/fluxion/layers/__init__.py | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/src/refiners/fluxion/layers/__init__.py b/src/refiners/fluxion/layers/__init__.py
index 1c1a03f..00ab130 100644
--- a/src/refiners/fluxion/layers/__init__.py
+++ b/src/refiners/fluxion/layers/__init__.py
@@ -1,5 +1,18 @@
-from refiners.fluxion.layers.activations import GLU, ApproximateGeLU, GeLU, ReLU, Sigmoid, SiLU
-from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
+from refiners.fluxion.layers.activations import (
+    GLU,
+    Activation,
+    ApproximateGeLU,
+    GeLU,
+    ReLU,
+    Sigmoid,
+    SiLU,
+)
+from refiners.fluxion.layers.attentions import (
+    Attention,
+    ScaledDotProductAttention,
+    SelfAttention,
+    SelfAttention2d,
+)
 from refiners.fluxion.layers.basics import (
     Cos,
     Flatten,
@@ -49,6 +62,7 @@ __all__ = [
     "GroupNorm",
     "LayerNorm2d",
     "InstanceNorm2d",
+    "Activation",
     "GeLU",
     "GLU",
     "SiLU",
@@ -56,6 +70,7 @@ __all__ = [
     "ApproximateGeLU",
     "Sigmoid",
     "Attention",
+    "ScaledDotProductAttention",
     "SelfAttention",
     "SelfAttention2d",
     "Identity",
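
Usage sketch (illustrative note, not part of the diff above): once this patch is
applied, `Activation` and `ScaledDotProductAttention` resolve at the package
level instead of only in their submodules. Constructor arguments are not shown
in the diff, so only the package-level imports are demonstrated here.

    from refiners.fluxion.layers import Activation, ScaledDotProductAttention

    # Both names now resolve at the package level thanks to the re-exports above.
    print(Activation, ScaledDotProductAttention)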