-
-
Notifications
You must be signed in to change notification settings - Fork 48
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Kye
committed
May 12, 2024
1 parent
2e6e0b6
commit b9abb28
Showing
8 changed files
with
60 additions
and
13 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
# Example: run FractoralNorm from the zeta library over a random tensor.
import torch  # Tensor creation and math

from zeta.nn import FractoralNorm  # Normalization layer under test

# Random input of shape (2, 3, 4).
x = torch.randn(2, 3, 4)

# Build the norm layer and apply it in one step.
# NOTE(review): assumes FractoralNorm(4, 4) takes (dim, depth) — confirm
# against the zeta.nn documentation.
normed = FractoralNorm(4, 4)(x)

# Print the normalized tensor; its shape matches the input,
# i.e. torch.Size([2, 3, 4]).
print(normed)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
from torch.nn import Module | ||
import torch | ||
from torch import nn, Tensor | ||
|
||
class LayerScale(Module): | ||
""" | ||
Applies layer scaling to the output of a given module. | ||
Args: | ||
fn (Module): The module to apply layer scaling to. | ||
dim (int): The dimension along which to apply the scaling. | ||
init_value (float, optional): The initial value for the scaling factor. Defaults to 0. | ||
Attributes: | ||
fn (Module): The module to apply layer scaling to. | ||
gamma (Parameter): The scaling factor parameter. | ||
""" | ||
|
||
def __init__(self, fn: Module, dim, init_value=0.): | ||
super().__init__() | ||
self.fn = fn | ||
self.gamma = nn.Parameter(torch.ones(dim) * init_value) | ||
|
||
def forward(self, x, **kwargs): | ||
out = self.fn(x, **kwargs) | ||
|
||
if isinstance(out, Tensor): | ||
return out * self.gamma | ||
|
||
out, *rest = out | ||
return out * self.gamma, *rest |