
Merge pull request #21 from eliasbayona/master

Added Basic Neural Network
Jeffery Russell committed 5 years ago (via GitHub)
commit 16aae11a21
2 changed files with 39 additions and 0 deletions:
  1. ML Cookbook/BasicNeuralNet.py (+23, -0)
  2. ML Cookbook/CNNs/BasicCNN.py (+16, -0)

ML Cookbook/BasicNeuralNet.py (+23, -0)

@@ -0,0 +1,23 @@
from torch import nn

class BasicNeuralNet(nn.Module):
    def __init__(self):
        super().__init__()
        # Inputs to hidden layer linear transformation
        self.hidden = nn.Linear(784, 256)
        # Output layer, 10 units
        self.output = nn.Linear(256, 10)
        # Define sigmoid activation and softmax output
        self.sigmoid = nn.Sigmoid()
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        # Pass the input tensor through each of the operations
        x = self.hidden(x)
        x = self.sigmoid(x)
        x = self.output(x)
        x = self.softmax(x)
        return x
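
A minimal usage sketch for the module above (not part of the PR), assuming MNIST-style inputs flattened to 784 features; the batch size of 64 and the random input tensor are illustrative only:

import torch

# hypothetical quick check: push a random batch of 64 flattened
# 28x28 images through BasicNeuralNet (class defined above)
model = BasicNeuralNet()
x = torch.randn(64, 784)               # (batch, features)
probs = model(x)                       # (64, 10) class probabilities
print(probs.shape, probs.sum(dim=1))   # each row sums to ~1 because of the softmax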

ML Cookbook/CNNs/BasicCNN.py (+16, -0)

@@ -0,0 +1,16 @@
import torch.nn as nn
import torch.nn.functional as F

# CNN architecture definition
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        # convolutional layer
        self.conv1 = nn.Conv2d(3, 16, 3, padding=1)
        # max pooling layer
        self.pool = nn.MaxPool2d(2, 2)

    def forward(self, x):
        # add sequence of convolutional and max pooling layers
        x = self.pool(F.relu(self.conv1(x)))
        return x
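
A minimal usage sketch for the CNN above (not part of the PR), assuming 3-channel 32x32 inputs such as CIFAR-10-sized images; the batch size and random input tensor are illustrative only:

import torch

# hypothetical quick check: the 3x3 conv with padding=1 keeps the spatial
# size at 32x32, and the 2x2 max pool halves it to 16x16
net = Net()
x = torch.randn(8, 3, 32, 32)   # (batch, channels, height, width)
out = net(x)
print(out.shape)                # torch.Size([8, 16, 16, 16])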
