PyTorch Example: Batch Normalization
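This gist builds a small synthetic regression problem, y = x1^3 + x2^2 + x3 + 1 over four random inputs, then trains two otherwise identical five-layer MLPs on it: MyNet without normalization and MyNet2 with batch normalization after each hidden layer. Each model's test error is printed for comparison.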
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
# Target function the network should learn: y = x1^3 + x2^2 + x3 + 1,
# where each term takes its own input feature.
# Inputs are sampled uniformly from [0, 1) via torch.rand.
x1 = np.poly1d([1, 0, 0, 0], variable="x1")  # x1^3
x2 = np.poly1d([0, 1, 0, 0], variable="x2")  # x2^2
x3 = np.poly1d([0, 0, 1, 0], variable="x3")  # x3^1
x4 = np.poly1d([0, 0, 0, 1], variable="x4")  # constant 1
data_number = 365
data = torch.rand(data_number, 4)
answer = torch.empty(data_number, 1)  # filled in by the loop below
for count, row in enumerate(data):
    answer[count] = x1(row[0]) + x2(row[1]) + x3(row[2]) + x4(row[3])
# Split: 70% training, 30% test.
data_ratio = int(data_number * 0.7)
training_data = data[:data_ratio]
training_target = answer[:data_ratio]
test_data = data[data_ratio:]
test_target = answer[data_ratio:]
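# Sanity check (optional): with data_number = 365 this gives
# 255 training rows and 110 test rows.
# print(training_data.shape, test_data.shape)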
# Baseline network: five fully connected layers, no normalization.
class MyNet(nn.Module):
    def __init__(self):
        super(MyNet, self).__init__()
        self.fc1 = nn.Linear(4, 4)
        self.fc2 = nn.Linear(4, 4)
        self.fc3 = nn.Linear(4, 4)
        self.fc4 = nn.Linear(4, 4)
        self.fc5 = nn.Linear(4, 1)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = F.relu(self.fc3(x))
        x = F.relu(self.fc4(x))
        x = self.fc5(x)
        return x
# Instance of the baseline network.
net = MyNet()

# Common training settings.
epoch = 1000
learning_rate = 0.01

# Training.
optimizer = optim.SGD(net.parameters(), lr=learning_rate)
criterion = nn.MSELoss()
for i in range(epoch):
    optimizer.zero_grad()
    output = net(training_data)
    loss = criterion(output, training_target)
    loss.backward()
    optimizer.step()
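    # Optional progress logging (not part of the original gist):
    # if i % 100 == 0:
    #     print(i, loss.item())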
# Evaluate on the test set: mean absolute error.
net.eval()
with torch.no_grad():
    output = net(test_data)
print((output - test_target).abs().mean())
# The same network with batch normalization after every hidden layer.
class MyNet2(nn.Module):
    def __init__(self):
        super(MyNet2, self).__init__()
        self.fc1 = nn.Linear(4, 4)
        self.fc2 = nn.Linear(4, 4)
        self.fc3 = nn.Linear(4, 4)
        self.fc4 = nn.Linear(4, 4)
        self.fc5 = nn.Linear(4, 1)
        # One BatchNorm1d per layer, so each layer keeps its own
        # running statistics rather than sharing a single module.
        self.bn1 = nn.BatchNorm1d(4)
        self.bn2 = nn.BatchNorm1d(4)
        self.bn3 = nn.BatchNorm1d(4)
        self.bn4 = nn.BatchNorm1d(4)

    def forward(self, x):
        x = F.relu(self.bn1(self.fc1(x)))
        x = F.relu(self.bn2(self.fc2(x)))
        x = F.relu(self.bn3(self.fc3(x)))
        x = F.relu(self.bn4(self.fc4(x)))
        x = self.fc5(x)
        return x
# Instance of the batch-normalized network.
net2 = MyNet2()

# Training is identical to the baseline's.
optimizer = optim.SGD(net2.parameters(), lr=learning_rate)
criterion = nn.MSELoss()
for i in range(epoch):
    optimizer.zero_grad()
    output = net2(training_data)
    loss = criterion(output, training_target)
    loss.backward()
    optimizer.step()
# Evaluate again; eval() makes BatchNorm use its running statistics
# instead of per-batch statistics.
net2.eval()
with torch.no_grad():
    output = net2(test_data)
print((output - test_target).abs().mean())
print("epoch =", epoch, ", learning_rate =", learning_rate)