Mirror of https://github.com/finegrain-ai/refiners.git (synced 2024-11-09 23:12:02 +00:00)
remove unused Chunk and Unbind layers

commit e6be1394ff
parent c57f2228f8
@@ -2,7 +2,6 @@ from refiners.fluxion.layers.activations import GLU, ApproximateGeLU, GeLU, ReLU
 from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
 from refiners.fluxion.layers.basics import (
     Buffer,
-    Chunk,
     Cos,
     Flatten,
     GetArg,
@@ -15,7 +14,6 @@ from refiners.fluxion.layers.basics import (
     Slicing,
     Squeeze,
     Transpose,
-    Unbind,
     Unflatten,
     Unsqueeze,
     View,
@@ -75,9 +73,7 @@ __all__ = [
     "Parameter",
     "Sin",
     "Cos",
-    "Chunk",
     "Multiply",
-    "Unbind",
     "Matmul",
     "Buffer",
     "Lambda",
@@ -130,25 +130,6 @@ class Unsqueeze(Module):
         return x.unsqueeze(self.dim)


-class Unbind(Module):
-    def __init__(self, dim: int = 0) -> None:
-        self.dim = dim
-        super().__init__()
-
-    def forward(self, x: Tensor) -> tuple[Tensor, ...]:
-        return x.unbind(dim=self.dim)  # type: ignore
-
-
-class Chunk(Module):
-    def __init__(self, chunks: int, dim: int = 0) -> None:
-        self.chunks = chunks
-        self.dim = dim
-        super().__init__()
-
-    def forward(self, x: Tensor) -> tuple[Tensor, ...]:
-        return x.chunk(chunks=self.chunks, dim=self.dim)  # type: ignore
-
-
 class Sin(Module):
     def forward(self, x: Tensor) -> Tensor:
         return torch.sin(input=x)
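The deleted classes were thin wrappers around Tensor.chunk and Tensor.unbind: the first three hunks drop them from the package's re-exports, and the last hunk removes the definitions themselves from the basics module. For downstream code that still needs this behavior, below is a minimal stand-in sketch built on plain torch.nn.Module rather than fluxion's Module base class (any constructor-argument bookkeeping fluxion's base class performs is not reproduced); the names and the small self-test at the bottom are illustrative only.

import torch
from torch import Tensor, nn


class Unbind(nn.Module):
    # Stand-in for the removed layer: split a tensor along `dim`
    # into a tuple of views, delegating to Tensor.unbind.
    def __init__(self, dim: int = 0) -> None:
        super().__init__()
        self.dim = dim

    def forward(self, x: Tensor) -> tuple[Tensor, ...]:
        return x.unbind(dim=self.dim)


class Chunk(nn.Module):
    # Stand-in for the removed layer: split a tensor into `chunks`
    # pieces along `dim`, delegating to Tensor.chunk.
    def __init__(self, chunks: int, dim: int = 0) -> None:
        super().__init__()
        self.chunks = chunks
        self.dim = dim

    def forward(self, x: Tensor) -> tuple[Tensor, ...]:
        return x.chunk(chunks=self.chunks, dim=self.dim)


if __name__ == "__main__":
    x = torch.arange(12).reshape(3, 4)
    rows = Unbind(dim=0)(x)             # 3 tensors of shape (4,)
    halves = Chunk(chunks=2, dim=1)(x)  # 2 tensors of shape (3, 2)
    assert len(rows) == 3 and rows[0].shape == (4,)
    assert len(halves) == 2 and halves[0].shape == (3, 2)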