Releases · konas122/DaZero
v1.1
Add Transformer support: new SelfAttention and TransformerBlock layers.
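As a rough illustration of the new layers: the class names come from this release, but the module path and constructor signatures below are assumptions, modeled on the LayerNorm layer API shown in the v0.2 examples further down.

```python
import numpy as np
import dazero.layers as L
from dazero import Parameter

# Hypothetical sketch: module path (dazero.layers) and the single
# embedding-dimension constructor argument are assumptions, not
# confirmed by the release notes.
x = Parameter(np.random.rand(8, 16, 64).astype(np.float64))  # (batch, tokens, dim)

attn = L.SelfAttention(64)      # assumed: embedding dimension argument
block = L.TransformerBlock(64)  # assumed: same dimension argument

y_attn = attn(x)    # self-attention over the token axis
y_block = block(x)  # full Transformer block
y_block.backward()
```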
v0.2
Add LayerNorm, available both as a function (`F.layer_norm`) and as a layer (`L.LayerNorm`).
Example 1: functional API
```python
import numpy as np
import dazero.functions as F
from dazero import Parameter

# Normalize over the trailing (100, 30, 30) axes of a (100, 100, 30, 30) input.
inputs = np.random.rand(100, 100, 30, 30).astype(np.float64)
normalized_shape = (100, 30, 30)

x = Parameter(inputs)
output = F.layer_norm(x, normalized_shape)
```
Example 2: layer API inside a Model
```python
import numpy as np
import dazero.layers as L
from dazero import Model, Parameter

class Net(Model):
    def __init__(self, normalized_shape, gamma=None, beta=None):
        super().__init__()
        # LayerNorm layer; gamma and beta are optional affine parameters.
        self.layer = L.LayerNorm(normalized_shape, gamma=gamma, beta=beta)

    def forward(self, inputs):
        return self.layer(inputs)

inputs = np.random.rand(100, 100, 30, 30).astype(np.float64)
normalized_shape = (100, 30, 30)

x = Parameter(inputs)
layernorm = Net(normalized_shape)
output = layernorm(x)
output.backward()  # backpropagate through the layer
```