@@ -0,0 +1,23 @@
from torch import nn


class BasicNeuralNet(nn.Module):
    def __init__(self):
        super().__init__()

        # Inputs to hidden layer linear transformation
        self.hidden = nn.Linear(784, 256)
        # Output layer, 10 units
        self.output = nn.Linear(256, 10)

        # Define sigmoid activation and softmax output
        self.sigmoid = nn.Sigmoid()
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        # Pass the input tensor through each of the operations
        x = self.hidden(x)
        x = self.sigmoid(x)
        x = self.output(x)
        x = self.softmax(x)
        return x
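For reviewers, a minimal smoke test of the new module might look like the sketch below. The 784-feature input width comes from the nn.Linear(784, 256) layer in the diff; the batch size of 64 and the reading of the input as flattened 28x28 images are assumptions made only for illustration.

import torch

model = BasicNeuralNet()
images = torch.randn(64, 784)   # fake batch: 64 flattened inputs (28x28 interpretation assumed)
probs = model(images)           # hidden -> sigmoid -> output -> softmax
print(probs.shape)              # torch.Size([64, 10])
print(probs.sum(dim=1))         # each row sums to ~1 because of Softmax(dim=1)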