Rewrite MNIST model to fuse bias back into linear layer (#164) (#188)
sdjordjevicTT authored Aug 29, 2024
1 parent 8ef4d20 commit 5626283
Showing 1 changed file with 2 additions and 6 deletions.
pybuda/test/mlir/mnist/utils.py: 2 additions, 6 deletions
@@ -16,18 +16,14 @@
 class MNISTLinear(nn.Module):
     def __init__(self, input_size=784, output_size=10, hidden_size=256):
         super(MNISTLinear, self).__init__()
-        self.l1 = nn.Linear(input_size, hidden_size, bias=False)
-        self.b1 = nn.Parameter(torch.ones(1, hidden_size))
+        self.l1 = nn.Linear(input_size, hidden_size)
         self.relu = nn.ReLU()
-        self.b2 = nn.Parameter(torch.ones(1, output_size))
-        self.l2 = nn.Linear(hidden_size, output_size, bias=False)
+        self.l2 = nn.Linear(hidden_size, output_size)
 
     def forward(self, x):
         x = self.l1(x)
-        x = x + self.b1
         x = self.relu(x)
         x = self.l2(x)
-        x = x + self.b2
 
         return nn.functional.softmax(x)
 
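For reference, a minimal runnable sketch of the module as it reads after this commit. It matches the diff above except for one deliberate deviation: the commit keeps nn.functional.softmax(x) with an implicit dim, which recent PyTorch versions warn about, so dim=-1 is spelled out here.

import torch.nn as nn

class MNISTLinear(nn.Module):
    def __init__(self, input_size=784, output_size=10, hidden_size=256):
        super(MNISTLinear, self).__init__()
        # bias=True is the nn.Linear default, so each layer now owns its
        # bias instead of relying on the separate b1/b2 Parameters removed
        # by this commit.
        self.l1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        self.l2 = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        x = self.l1(x)
        x = self.relu(x)
        x = self.l2(x)
        # dim=-1 is explicit here only to avoid PyTorch's implicit-dim warning.
        return nn.functional.softmax(x, dim=-1)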
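Why the fusion is safe: nn.Linear with a bias computes x @ W.T + b, which is exactly what the old code did with a bias-free Linear followed by an elementwise add in forward(). A small sanity check of that identity, assuming the fused layer's bias carries the same values as the old standalone Parameter (the old code initialized b1/b2 with torch.ones, while nn.Linear initializes its bias uniformly, so initial values differ in practice):

import torch
import torch.nn as nn

torch.manual_seed(0)
x = torch.randn(4, 784)

fused = nn.Linear(784, 256)                     # bias=True by default
unfused = nn.Linear(784, 256, bias=False)
unfused.weight = fused.weight                   # reuse the same weight matrix
b = fused.bias.detach().reshape(1, 256)         # stand-in for the old b1 Parameter

# fused(x) == x @ W.T + b == unfused(x) + b
assert torch.allclose(fused(x), unfused(x) + b)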
