Neural Network from Scratch with PyTorch
{
"cells": [
{
"metadata": {
"deletable": false,
"editable": false,
"run_control": {
"frozen": true
}
},
"cell_type": "markdown",
"source": ""
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# pip install torch torchvision\nimport torch\nimport torch.nn as nn",
"execution_count": 1,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# creating some sample data using the torch.tensor\n\n# X represents the amount of hours studied and how much time students spent sleeping\nX = torch.tensor(([2, 9], [1, 5], [3, 6]), dtype=torch.float) # 3 X 2 tensor\n# y represent grades\ny = torch.tensor(([92], [100], [89]), dtype=torch.float) # 3 X 1 tensor\n\n# xPredicted is a single input for which we want to predict a grade \nxPredicted = torch.tensor(([4, 8]), dtype=torch.float) # 1 X 2 tensor\n\n# equivelent to numpy.shape\nprint(X.size())\nprint(y.size())", | |
"execution_count": 2, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": "torch.Size([3, 2])\ntorch.Size([3, 1])\n" | |
} | |
] | |
}, | |
{
"metadata": {
"scrolled": true,
"trusted": true
},
"cell_type": "code",
"source": "# scale units: divide each feature by its maximum so values fall in [0, 1]\nX_max, _ = torch.max(X, 0)\nxPredicted_max, _ = torch.max(xPredicted, 0)\n\nprint(X)\nX = torch.div(X, X_max)\nprint(X)\n\nxPredicted = torch.div(xPredicted, xPredicted_max) # scaled by its own max; see the note in the next cell\ny = y / 100 # max test score is 100\n\nprint(xPredicted)",
"execution_count": 3,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "tensor([[2., 9.],\n [1., 5.],\n [3., 6.]])\ntensor([[0.6667, 1.0000],\n [0.3333, 0.5556],\n [1.0000, 0.6667]])\ntensor([0.5000, 1.0000])\n"
}
]
},
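{
"metadata": {},
"cell_type": "markdown",
"source": "A caveat on the scaling above: `xPredicted` is divided by its own per-feature maximum, while the training inputs are divided by `X_max`, so the two end up on different scales whenever the new sample's maxima differ from the training maxima. The sketch below reuses the training maxima instead; the names `X_raw`, `x_new_raw`, and `x_new` are introduced here for illustration."
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# a minimal sketch, not the gist's method: reuse the per-feature training maxima\n# so the prediction input lives on the same scale as the training data\nX_raw = torch.tensor(([2, 9], [1, 5], [3, 6]), dtype=torch.float)\nx_new_raw = torch.tensor(([4, 8]), dtype=torch.float)\n\nX_max, _ = torch.max(X_raw, 0)       # tensor([3., 9.])\nx_new = torch.div(x_new_raw, X_max)  # tensor([1.3333, 0.8889]); 4 hours studied exceeds the training max of 3\nprint(x_new)",
"execution_count": null,
"outputs": []
},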
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# Computate Graph\n\nclass Neural_Network(nn.Module):\n def __init__(self, ):\n super(Neural_Network, self).__init__()\n # parameters\n # TODO: parameters can be parameterized instead of declaring them here\n self.inputSize = 2\n self.outputSize = 1\n self.hiddenSize = 3\n \n # weights matrices are initialized with values randomly chosen from a normal distribution\n self.W1 = torch.randn(self.inputSize, self.hiddenSize) # 3 X 2 tensor\n self.W2 = torch.randn(self.hiddenSize, self.outputSize) # 3 X 1 tensor\n \n def forward(self, X):\n self.z = torch.matmul(X, self.W1) # 3 X 3 \".dot\" does not broadcast in PyTorch\n self.z2 = self.sigmoid(self.z) # activation function\n self.z3 = torch.matmul(self.z2, self.W2)\n o = self.sigmoid(self.z3) # final activation function\n return o\n \n def sigmoid(self, s):\n return 1 / (1 + torch.exp(-s))\n \n def sigmoidPrime(self, s):\n # derivative of sigmoid\n return s * (1 - s)\n \n def backward(self, X, y, o):\n self.o_error = y - o # error in output\n self.o_delta = self.o_error * self.sigmoidPrime(o) # derivative of sig to error\n self.z2_error = torch.matmul(self.o_delta, torch.t(self.W2))\n self.z2_delta = self.z2_error * self.sigmoidPrime(self.z2)\n self.W1 += torch.matmul(torch.t(X), self.z2_delta)\n self.W2 += torch.matmul(torch.t(self.z2), self.o_delta)\n \n def train(self, X, y):\n # forward + backward pass for training\n o = self.forward(X)\n \n self.backward(X, y, o)\n \n def saveWeights(self, model):\n # we will use the PyTorch internal storage functions\n torch.save(model, \"NN\")\n # you can reload model with all the weights and so forth with:\n # torch.load(\"NN\")\n \n def predict(self):\n print (\"Predicted data based on trained weights: \")\n print (\"Input (scaled): \\n\" + str(xPredicted))\n print (\"Output: \\n\" + str(self.forward(xPredicted)))", | |
"execution_count": 9, | |
"outputs": [] | |
}, | |
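{
"metadata": {},
"cell_type": "markdown",
"source": "For comparison, here is a minimal sketch of the same 2-3-1 architecture written the idiomatic PyTorch way: `nn.Linear` layers, `torch.sigmoid`, and autograd with an SGD optimizer in place of the hand-written `backward`. This is a sketch rather than the gist's method: `nn.Linear` adds bias terms the manual version lacks, and the learning rate of 0.5 is an assumption, so the numbers will not match."
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# a minimal sketch, not the gist's method: the same 2-3-1 network via autograd\nimport torch\nimport torch.nn as nn\n\nclass AutogradNet(nn.Module):\n    def __init__(self):\n        super(AutogradNet, self).__init__()\n        self.hidden = nn.Linear(2, 3)  # inputSize -> hiddenSize (adds a bias term)\n        self.out = nn.Linear(3, 1)     # hiddenSize -> outputSize (adds a bias term)\n\n    def forward(self, x):\n        x = torch.sigmoid(self.hidden(x))\n        return torch.sigmoid(self.out(x))\n\nnet = AutogradNet()\noptimizer = torch.optim.SGD(net.parameters(), lr=0.5)  # the learning rate is an assumption\ncriterion = nn.MSELoss()\n\nfor _ in range(1000):\n    optimizer.zero_grad()        # clear accumulated gradients\n    loss = criterion(net(X), y)  # X and y are the scaled tensors defined above\n    loss.backward()              # autograd computes all gradients\n    optimizer.step()             # update weights and biases\nprint(loss.item())",
"execution_count": null,
"outputs": []
},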
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "NN = Neural_Network()\nfor i in range(1000): # trains the NN 1,000 times\n print (\"#\" + str(i) + \" Loss: \" + str(torch.mean((y - NN(X))**2).detach().item())) # mean sum squared loss\n NN.train(X, y)\nNN.saveWeights(NN)\nNN.predict()", | |
"execution_count": 10, | |
"outputs": [ | |
{ | |
"name": "stdout", | |
"output_type": "stream", | |
"text": "#0 Loss: 0.007429896388202906\n#1 Loss: 0.007136822212487459\n#2 Loss: 0.00687433173879981\n#3 Loss: 0.006638271268457174\n#4 Loss: 0.006425193976610899\n#5 Loss: 0.006232195068150759\n#6 Loss: 0.006056810263544321\n#7 Loss: 0.005896960850805044\n#8 Loss: 0.005750857759267092\n#9 Loss: 0.005616967100650072\n#10 Loss: 0.005493972450494766\n#11 Loss: 0.005380699876695871\n#12 Loss: 0.0052761803381145\n#13 Loss: 0.005179520696401596\n#14 Loss: 0.005089965183287859\n#15 Loss: 0.005006836261600256\n#16 Loss: 0.004929537419229746\n#17 Loss: 0.00485753221437335\n#18 Loss: 0.004790371749550104\n#19 Loss: 0.004727609921246767\n#20 Loss: 0.004668896552175283\n#21 Loss: 0.004613881465047598\n#22 Loss: 0.004562269896268845\n#23 Loss: 0.00451379781588912\n#24 Loss: 0.004468204919248819\n#25 Loss: 0.0044252811931073666\n#26 Loss: 0.004384817089885473\n#27 Loss: 0.0043466477654874325\n#28 Loss: 0.0043105920776724815\n#29 Loss: 0.004276504274457693\n#30 Loss: 0.004244245123118162\n#31 Loss: 0.004213692154735327\n#32 Loss: 0.004184729885309935\n#33 Loss: 0.004157244693487883\n#34 Loss: 0.00413114856928587\n#35 Loss: 0.00410634558647871\n#36 Loss: 0.004082761239260435\n#37 Loss: 0.004060313571244478\n#38 Loss: 0.004038934130221605\n#39 Loss: 0.004018553998321295\n#40 Loss: 0.0039991228841245174\n#41 Loss: 0.0039805760607123375\n#42 Loss: 0.003962861839681864\n#43 Loss: 0.003945937845855951\n#44 Loss: 0.003929756581783295\n#45 Loss: 0.003914279397577047\n#46 Loss: 0.0038994650822132826\n#47 Loss: 0.0038852717261761427\n#48 Loss: 0.0038716711569577456\n#49 Loss: 0.00385863333940506\n#50 Loss: 0.0038461231160908937\n#51 Loss: 0.0038341206964105368\n#52 Loss: 0.00382259301841259\n#53 Loss: 0.0038115207571536303\n#54 Loss: 0.0038008783012628555\n#55 Loss: 0.0037906423676759005\n#56 Loss: 0.003780798986554146\n#57 Loss: 0.003771319054067135\n#58 Loss: 0.0037621997762471437\n#59 Loss: 0.0037534136790782213\n#60 Loss: 0.0037449419032782316\n#61 Loss: 0.00373678095638752\n#62 Loss: 0.0037289049942046404\n#63 Loss: 0.003721313551068306\n#64 Loss: 0.0037139784544706345\n#65 Loss: 0.003706900170072913\n#66 Loss: 0.003700059140101075\n#67 Loss: 0.003693455131724477\n#68 Loss: 0.003687068819999695\n#69 Loss: 0.0036808873992413282\n#70 Loss: 0.003674915060400963\n#71 Loss: 0.003669134108349681\n#72 Loss: 0.0036635363940149546\n#73 Loss: 0.003658117027953267\n#74 Loss: 0.0036528666969388723\n#75 Loss: 0.0036477812100201845\n#76 Loss: 0.0036428554449230433\n#77 Loss: 0.0036380700767040253\n#78 Loss: 0.0036334358155727386\n#79 Loss: 0.0036289291456341743\n#80 Loss: 0.0036245675291866064\n#81 Loss: 0.003620326519012451\n#82 Loss: 0.0036162110045552254\n#83 Loss: 0.0036122091114521027\n#84 Loss: 0.0036083252634853125\n#85 Loss: 0.0036045443266630173\n#86 Loss: 0.003600866300985217\n#87 Loss: 0.003597299801185727\n#88 Loss: 0.003593817353248596\n#89 Loss: 0.003590435953810811\n#90 Loss: 0.003587141400203109\n#91 Loss: 0.0035839341580867767\n#92 Loss: 0.0035808123648166656\n#93 Loss: 0.0035777695011347532\n#94 Loss: 0.0035747999791055918\n#95 Loss: 0.0035719049628823996\n#96 Loss: 0.003569089574739337\n#97 Loss: 0.003566335653886199\n#98 Loss: 0.003563654376193881\n#99 Loss: 0.003561030374839902\n#100 Loss: 0.003558470867574215\n#101 Loss: 0.003555975155904889\n#102 Loss: 0.003553532063961029\n#103 Loss: 0.003551148809492588\n#104 Loss: 0.003548821434378624\n#105 Loss: 0.00354654504917562\n#106 Loss: 0.0035443175584077835\n#107 Loss: 0.0035421382635831833\n#108 Loss: 0.003540008096024394\n#109 Loss: 
0.003537918673828244\n#110 Loss: 0.0035358767490833998\n#111 Loss: 0.003533876733854413\n#112 Loss: 0.00353191583417356\n#113 Loss: 0.0035300003364682198\n#114 Loss: 0.0035281172022223473\n#115 Loss: 0.003526274813339114\n#116 Loss: 0.003524466184899211\n#117 Loss: 0.003522701794281602\n#118 Loss: 0.0035209578927606344\n#119 Loss: 0.003519252873957157\n#120 Loss: 0.003517576726153493\n#121 Loss: 0.0035159389954060316\n#122 Loss: 0.003514323616400361\n#123 Loss: 0.003512740135192871\n#124 Loss: 0.003511182265356183\n#125 Loss: 0.0035096528008580208\n#126 Loss: 0.003508147085085511\n#127 Loss: 0.003506667213514447\n#128 Loss: 0.0035052176099270582\n#129 Loss: 0.0035037861671298742\n#130 Loss: 0.0035023808013647795\n#131 Loss: 0.003500998020172119\n#132 Loss: 0.003499633399769664\n#133 Loss: 0.0034982890356332064\n#134 Loss: 0.003496972844004631\n#135 Loss: 0.003495667828246951\n#136 Loss: 0.003494384465739131\n#137 Loss: 0.003493117867037654\n#138 Loss: 0.0034918717574328184\n#139 Loss: 0.00349065032787621\n#140 Loss: 0.003489432856440544\n#141 Loss: 0.00348824099637568\n#142 Loss: 0.003487059148028493\n#143 Loss: 0.0034858949948102236\n#144 Loss: 0.0034847462084144354\n#145 Loss: 0.0034836139529943466\n#146 Loss: 0.003482494270429015\n#147 Loss: 0.003481391118839383\n#148 Loss: 0.003480295417830348\n#149 Loss: 0.003479210427030921\n#150 Loss: 0.0034781501162797213\n#151 Loss: 0.0034770946949720383\n#152 Loss: 0.0034760525450110435\n#153 Loss: 0.0034750227350741625\n#154 Loss: 0.0034740024711936712\n#155 Loss: 0.003472989657893777\n#156 Loss: 0.003471995471045375\n#157 Loss: 0.003471007803454995\n#158 Loss: 0.003470028517767787\n#159 Loss: 0.0034690583124756813\n#160 Loss: 0.0034680941607803106\n#161 Loss: 0.0034671456087380648\n#162 Loss: 0.003466206369921565\n#163 Loss: 0.00346527062356472\n#164 Loss: 0.003464349312707782\n#165 Loss: 0.00346343033015728\n#166 Loss: 0.003462528809905052\n#167 Loss: 0.0034616210032254457\n#168 Loss: 0.0034607311245054007\n#169 Loss: 0.003459845669567585\n#170 Loss: 0.0034589646384119987\n#171 Loss: 0.0034580936189740896\n#172 Loss: 0.0034572307486087084\n#173 Loss: 0.0034563709050416946\n#174 Loss: 0.0034555227030068636\n#175 Loss: 0.0034546691458672285\n#176 Loss: 0.003453831886872649\n#177 Loss: 0.0034529983531683683\n#178 Loss: 0.0034521690104156733\n#179 Loss: 0.0034513454884290695\n#180 Loss: 0.0034505247604101896\n#181 Loss: 0.0034497110173106194\n#182 Loss: 0.0034489023964852095\n#183 Loss: 0.0034481019247323275\n#184 Loss: 0.0034473019186407328\n#185 Loss: 0.0034465075004845858\n#186 Loss: 0.003445720300078392\n#187 Loss: 0.003444937989115715\n#188 Loss: 0.003444146132096648\n#189 Loss: 0.0034433745313435793\n#190 Loss: 0.0034425982739776373\n#191 Loss: 0.003441832261160016\n#192 Loss: 0.0034410618245601654\n#193 Loss: 0.00344030256383121\n#194 Loss: 0.003439543768763542\n#195 Loss: 0.003438790561631322\n#196 Loss: 0.003438035724684596\n#197 Loss: 0.0034372862428426743\n#198 Loss: 0.0034365393221378326\n#199 Loss: 0.003435796359553933\n#200 Loss: 0.0034350629430264235\n#201 Loss: 0.0034343237057328224\n#202 Loss: 0.0034335919190198183\n#203 Loss: 0.003432856872677803\n#204 Loss: 0.0034321274142712355\n#205 Loss: 0.003431403310969472\n#206 Loss: 0.0034306757152080536\n#207 Loss: 0.003429955570027232\n#208 Loss: 0.0034292389173060656\n#209 Loss: 0.003428521566092968\n#210 Loss: 0.003427801886573434\n#211 Loss: 0.0034270938485860825\n#212 Loss: 0.0034263795241713524\n#213 Loss: 0.0034256717190146446\n#214 Loss: 0.003424961119890213\n#215 Loss: 
0.003424258204177022\n#216 Loss: 0.0034235545899719\n#217 Loss: 0.0034228579606860876\n#218 Loss: 0.0034221529494971037\n#219 Loss: 0.0034214600455015898\n#220 Loss: 0.00342076295055449\n#221 Loss: 0.003420066088438034\n#222 Loss: 0.003419372485950589\n#223 Loss: 0.00341868051327765\n#224 Loss: 0.0034179873764514923\n#225 Loss: 0.0034172972664237022\n#226 Loss: 0.003416612045839429\n#227 Loss: 0.003415921004489064\n#228 Loss: 0.0034152355510741472\n#229 Loss: 0.003414546838030219\n#230 Loss: 0.0034138625487685204\n#231 Loss: 0.0034131836146116257\n#232 Loss: 0.0034124988596886396\n#233 Loss: 0.0034118208568543196\n#234 Loss: 0.003411142388358712\n#235 Loss: 0.0034104567021131516\n#236 Loss: 0.003409783588722348\n#237 Loss: 0.0034091060515493155\n#238 Loss: 0.003408431075513363\n#239 Loss: 0.00340775097720325\n#240 Loss: 0.0034070780966430902\n#241 Loss: 0.003406403586268425\n#242 Loss: 0.0034057286102324724\n#243 Loss: 0.0034050524700433016\n#244 Loss: 0.003404380986467004\n#245 Loss: 0.003403702052310109\n#246 Loss: 0.003403039649128914\n#247 Loss: 0.003402368165552616\n#248 Loss: 0.0034016917925328016\n#249 Loss: 0.0034010231029242277\n#250 Loss: 0.0034003565087914467\n#251 Loss: 0.003399685025215149\n#252 Loss: 0.003399015637114644\n#253 Loss: 0.003398348344489932\n#254 Loss: 0.003397681750357151\n#255 Loss: 0.0033970086369663477\n#256 Loss: 0.003396345069631934\n#257 Loss: 0.0033956754487007856\n#258 Loss: 0.003395008621737361\n#259 Loss: 0.003394342726096511\n#260 Loss: 0.003393677296116948\n#261 Loss: 0.003393008140847087\n#262 Loss: 0.0033923406153917313\n#263 Loss: 0.0033916730899363756\n#264 Loss: 0.003391009522601962\n#265 Loss: 0.003390340367332101\n#266 Loss: 0.0033896772656589746\n#267 Loss: 0.0033890074118971825\n#268 Loss: 0.003388339886441827\n#269 Loss: 0.003387679345905781\n#270 Loss: 0.003387008560821414\n#271 Loss: 0.0033863431308418512\n#272 Loss: 0.0033856742084026337\n#273 Loss: 0.00338500807993114\n#274 Loss: 0.003384339390322566\n#275 Loss: 0.0033836790826171637\n#276 Loss: 0.0033830078318715096\n#277 Loss: 0.0033823412377387285\n#278 Loss: 0.0033816781360656023\n#279 Loss: 0.003381013870239258\n#280 Loss: 0.0033803414553403854\n#281 Loss: 0.0033796795178204775\n#282 Loss: 0.0033790096640586853\n#283 Loss: 0.0033783456310629845\n#284 Loss: 0.003377675311639905\n#285 Loss: 0.0033770063892006874\n#286 Loss: 0.0033763365354388952\n#287 Loss: 0.003375668777152896\n#288 Loss: 0.0033750010188668966\n#289 Loss: 0.003374331397935748\n#290 Loss: 0.003373664105311036\n#291 Loss: 0.003372994950041175\n#292 Loss: 0.003372327657416463\n#293 Loss: 0.0033716566395014524\n#294 Loss: 0.003370988415554166\n#295 Loss: 0.0033703141380101442\n#296 Loss: 0.0033696412574499846\n#297 Loss: 0.003368975594639778\n#298 Loss: 0.0033682987559586763\n#299 Loss: 0.003367633791640401\n#300 Loss: 0.003366956254467368\n#301 Loss: 0.0033662898931652308\n#302 Loss: 0.003365609562024474\n#303 Loss: 0.0033649401739239693\n#304 Loss: 0.00336426286958158\n#305 Loss: 0.0033635881263762712\n#306 Loss: 0.003362917574122548\n#307 Loss: 0.0033622391056269407\n#308 Loss: 0.003361570416018367\n#309 Loss: 0.00336089008487761\n#310 Loss: 0.003360213479027152\n#311 Loss: 0.003359538270160556\n#312 Loss: 0.0033588644582778215\n#313 Loss: 0.003358178772032261\n#314 Loss: 0.003357506124302745\n#315 Loss: 0.0033568234648555517\n#316 Loss: 0.003356150584295392\n#317 Loss: 0.0033554695546627045\n#318 Loss: 0.0033547868952155113\n#319 Loss: 0.003354110522195697\n#320 Loss: 0.003353429026901722\n#321 Loss: 
0.0033527470659464598\n#322 Loss: 0.00335206207819283\n#323 Loss: 0.0033513836096972227\n#324 Loss: 0.0033507011830806732\n#325 Loss: 0.0033500136341899633\n#326 Loss: 0.00334933097474277\n#327 Loss: 0.003348648315295577\n#328 Loss: 0.003347961464896798\n#329 Loss: 0.0033472811337560415\n#330 Loss: 0.003346593352034688\n#331 Loss: 0.0033459020778536797\n#332 Loss: 0.003345212899148464\n#333 Loss: 0.003344526281580329\n#334 Loss: 0.0033438371028751135\n#335 Loss: 0.0033431502524763346\n#336 Loss: 0.003342463867738843\n#337 Loss: 0.003341772360727191\n#338 Loss: 0.003341085510328412\n#339 Loss: 0.0033403905108571053\n#340 Loss: 0.003339693183079362\n#341 Loss: 0.0033390000462532043\n#342 Loss: 0.0033383071422576904\n#343 Loss: 0.003337620059028268\n#344 Loss: 0.0033369215670973063\n#345 Loss: 0.003336227498948574\n#346 Loss: 0.003335527377203107\n#347 Loss: 0.003334835870191455\n#348 Loss: 0.0033341378439217806\n#349 Loss: 0.0033334344625473022\n#350 Loss: 0.0033327427227050066\n#351 Loss: 0.0033320458605885506\n#352 Loss: 0.0033313401509076357\n#353 Loss: 0.003330640261992812\n#354 Loss: 0.003329940140247345\n#355 Loss: 0.0033292423468083143\n#356 Loss: 0.0033285387326031923\n#357 Loss: 0.0033278383780270815\n#358 Loss: 0.0033271312713623047\n#359 Loss: 0.003326427424326539\n#360 Loss: 0.0033257168252021074\n#361 Loss: 0.003325018333271146\n#362 Loss: 0.0033243035431951284\n#363 Loss: 0.0033236073795706034\n#364 Loss: 0.0033228944521397352\n#365 Loss: 0.0033221894409507513\n#366 Loss: 0.003321480005979538\n#367 Loss: 0.0033207654487341642\n#368 Loss: 0.0033200576435774565\n#369 Loss: 0.0033193447161465883\n#370 Loss: 0.0033186329528689384\n#371 Loss: 0.00331791746430099\n#372 Loss: 0.0033172082621604204\n#373 Loss: 0.0033164990600198507\n#374 Loss: 0.0033157786820083857\n#375 Loss: 0.0033150624949485064\n#376 Loss: 0.003314343513920903\n#377 Loss: 0.0033136282581835985\n#378 Loss: 0.003312913700938225\n#379 Loss: 0.0033121940214186907\n#380 Loss: 0.0033114731777459383\n#381 Loss: 0.003310754196718335\n#382 Loss: 0.00331003125756979\n#383 Loss: 0.003309310181066394\n#384 Loss: 0.0033085874747484922\n#385 Loss: 0.00330786663107574\n#386 Loss: 0.003307138802483678\n#387 Loss: 0.0033064160961657763\n#388 Loss: 0.0033056940883398056\n#389 Loss: 0.003304962068796158\n#390 Loss: 0.0033042412251234055\n#391 Loss: 0.0033035113010555506\n#392 Loss: 0.0033027802128344774\n#393 Loss: 0.00330205331556499\n#394 Loss: 0.0033013224601745605\n#395 Loss: 0.0033005925361067057\n#396 Loss: 0.0032998602837324142\n#397 Loss: 0.003299126634374261\n#398 Loss: 0.003298397408798337\n#399 Loss: 0.0032976623624563217\n#400 Loss: 0.003296930342912674\n#401 Loss: 0.003296192502602935\n#402 Loss: 0.003295455826446414\n#403 Loss: 0.003294718451797962\n#404 Loss: 0.0032939789816737175\n#405 Loss: 0.0032932411413639784\n#406 Loss: 0.003292504698038101\n#407 Loss: 0.003291759639978409\n#408 Loss: 0.0032910217996686697\n#409 Loss: 0.0032902800012379885\n#410 Loss: 0.003289532847702503\n#411 Loss: 0.003288790350779891\n#412 Loss: 0.003288047155365348\n#413 Loss: 0.0032873020973056555\n#414 Loss: 0.0032865542452782393\n#415 Loss: 0.0032858047634363174\n#416 Loss: 0.0032850641291588545\n#417 Loss: 0.0032843127846717834\n#418 Loss: 0.0032835633028298616\n#419 Loss: 0.0032828114926815033\n#420 Loss: 0.0032820655032992363\n#421 Loss: 0.0032813071738928556\n#422 Loss: 0.003280554199591279\n#423 Loss: 0.0032798051834106445\n#424 Loss: 0.003279050812125206\n#425 Loss: 0.0032782929483801126\n#426 Loss: 0.003277535317465663\n#427 Loss: 
0.0032767837401479483\n"
},
{
"name": "stdout",
"output_type": "stream",
"text": "#428 Loss: 0.0032760247122496367\n#429 Loss: 0.0032752633560448885\n#430 Loss: 0.003274502931162715\n#431 Loss: 0.0032737450674176216\n#432 Loss: 0.00327298603951931\n#433 Loss: 0.0032722223550081253\n#434 Loss: 0.0032714586704969406\n#435 Loss: 0.003270694287493825\n#436 Loss: 0.0032699313014745712\n#437 Loss: 0.0032691641245037317\n#438 Loss: 0.003268399043008685\n#439 Loss: 0.0032676327973604202\n#440 Loss: 0.0032668637577444315\n#441 Loss: 0.003266094485297799\n#442 Loss: 0.003265324980020523\n#443 Loss: 0.003264554077759385\n#444 Loss: 0.0032637789845466614\n#445 Loss: 0.0032630085479468107\n#446 Loss: 0.0032622350845485926\n#447 Loss: 0.003261463949456811\n#448 Loss: 0.003260687692090869\n#449 Loss: 0.003259907243773341\n#450 Loss: 0.0032591382041573524\n#451 Loss: 0.00325835426338017\n#452 Loss: 0.0032575754448771477\n#453 Loss: 0.00325679499655962\n#454 Loss: 0.003256014781072736\n#455 Loss: 0.0032552406191825867\n#456 Loss: 0.003254452720284462\n#457 Loss: 0.003253670409321785\n#458 Loss: 0.003252884605899453\n#459 Loss: 0.003252096474170685\n#460 Loss: 0.0032513150945305824\n#461 Loss: 0.0032505306880921125\n#462 Loss: 0.0032497402280569077\n#463 Loss: 0.0032489539589732885\n#464 Loss: 0.0032481641974300146\n#465 Loss: 0.0032473690807819366\n#466 Loss: 0.0032465795520693064\n#467 Loss: 0.0032457837369292974\n#468 Loss: 0.003244992345571518\n#469 Loss: 0.0032441976945847273\n#470 Loss: 0.0032434065360575914\n#471 Loss: 0.003242606297135353\n#472 Loss: 0.003241809783503413\n#473 Loss: 0.003241008846089244\n#474 Loss: 0.003240209072828293\n#475 Loss: 0.003239411860704422\n#476 Loss: 0.003238610690459609\n#477 Loss: 0.003237810917198658\n#478 Loss: 0.003237013006582856\n#479 Loss: 0.0032361994963139296\n#480 Loss: 0.0032353997230529785\n#481 Loss: 0.0032345924992114305\n#482 Loss: 0.0032337859738618135\n#483 Loss: 0.0032329827081412077\n#484 Loss: 0.0032321717590093613\n#485 Loss: 0.0032313622068613768\n#486 Loss: 0.003230554750189185\n#487 Loss: 0.0032297431025654078\n#488 Loss: 0.003228927031159401\n#489 Loss: 0.0032281186431646347\n#490 Loss: 0.0032273007091134787\n#491 Loss: 0.00322648580186069\n#492 Loss: 0.003225669264793396\n#493 Loss: 0.003224849933758378\n#494 Loss: 0.0032240375876426697\n#495 Loss: 0.0032232182566076517\n#496 Loss: 0.0032223984599113464\n#497 Loss: 0.0032215798273682594\n#498 Loss: 0.0032207544427365065\n#499 Loss: 0.0032199304550886154\n#500 Loss: 0.003219104604795575\n#501 Loss: 0.0032182789873331785\n#502 Loss: 0.003217454068362713\n#503 Loss: 0.003216634737327695\n#504 Loss: 0.0032158007379621267\n#505 Loss: 0.0032149699982255697\n#506 Loss: 0.003214145079255104\n#507 Loss: 0.003213308984413743\n#508 Loss: 0.003212481504306197\n#509 Loss: 0.003211650298908353\n#510 Loss: 0.0032108165323734283\n#511 Loss: 0.0032099836971610785\n#512 Loss: 0.0032091473694890738\n#513 Loss: 0.0032083075493574142\n#514 Loss: 0.003207470290362835\n#515 Loss: 0.003206632100045681\n#516 Loss: 0.0032057908829301596\n#517 Loss: 0.0032049540895968676\n#518 Loss: 0.0032041065860539675\n#519 Loss: 0.0032032709568738937\n#520 Loss: 0.0032024222891777754\n#521 Loss: 0.003201577812433243\n#522 Loss: 0.0032007291447371244\n#523 Loss: 0.0031998921185731888\n#524 Loss: 0.00319903832860291\n#525 Loss: 0.003198188031092286\n#526 Loss: 0.0031973449513316154\n#527 Loss: 0.0031964874360710382\n#528 Loss: 0.003195636672899127\n#529 Loss: 0.003194783814251423\n#530 Loss: 0.003193933516740799\n#531 Loss: 0.0031930769328027964\n#532 Loss: 0.0031922173220664263\n#533 Loss: 
0.003191363764926791\n#534 Loss: 0.0031905078794807196\n#535 Loss: 0.0031896450091153383\n#536 Loss: 0.003188781440258026\n#537 Loss: 0.00318792462348938\n#538 Loss: 0.0031870603561401367\n#539 Loss: 0.003186197718605399\n#540 Loss: 0.0031853339169174433\n#541 Loss: 0.003184465691447258\n#542 Loss: 0.003183603985235095\n#543 Loss: 0.003182734362781048\n#544 Loss: 0.0031818647403270006\n#545 Loss: 0.0031809990759938955\n#546 Loss: 0.003180120373144746\n#547 Loss: 0.003179255174472928\n#548 Loss: 0.0031783783342689276\n#549 Loss: 0.0031775059178471565\n#550 Loss: 0.0031766321044415236\n#551 Loss: 0.003175758756697178\n#552 Loss: 0.0031748758628964424\n#553 Loss: 0.003174002980813384\n#554 Loss: 0.003173121949657798\n#555 Loss: 0.0031722437124699354\n#556 Loss: 0.003171361982822418\n#557 Loss: 0.0031704737339168787\n#558 Loss: 0.0031695952638983727\n#559 Loss: 0.0031687107402831316\n#560 Loss: 0.0031678222585469484\n#561 Loss: 0.0031669337768107653\n#562 Loss: 0.003166050650179386\n#563 Loss: 0.0031651612371206284\n#564 Loss: 0.0031642718240618706\n#565 Loss: 0.003163382178172469\n#566 Loss: 0.003162488341331482\n#567 Loss: 0.0031615979969501495\n#568 Loss: 0.0031607013661414385\n#569 Loss: 0.003159803571179509\n#570 Loss: 0.0031589085701853037\n#571 Loss: 0.0031580140348523855\n#572 Loss: 0.003157111583277583\n#573 Loss: 0.003156212391331792\n#574 Loss: 0.0031553118024021387\n#575 Loss: 0.003154413541778922\n#576 Loss: 0.003153505502268672\n#577 Loss: 0.0031526004895567894\n#578 Loss: 0.0031516961753368378\n#579 Loss: 0.0031507927924394608\n#580 Loss: 0.0031498793978244066\n#581 Loss: 0.003148973686620593\n#582 Loss: 0.0031480658799409866\n#583 Loss: 0.0031471506226807833\n#584 Loss: 0.0031462411861866713\n#585 Loss: 0.003145330585539341\n#586 Loss: 0.0031444195192307234\n#587 Loss: 0.0031434998381882906\n#588 Loss: 0.003142584813758731\n#589 Loss: 0.0031416688580065966\n#590 Loss: 0.003140748245641589\n#591 Loss: 0.0031398283317685127\n#592 Loss: 0.003138909814879298\n#593 Loss: 0.0031379845459014177\n#594 Loss: 0.003137064166367054\n#595 Loss: 0.003136137267574668\n#596 Loss: 0.003135212929919362\n#597 Loss: 0.0031342841684818268\n#598 Loss: 0.0031333593651652336\n#599 Loss: 0.0031324296724051237\n#600 Loss: 0.00313149974681437\n#601 Loss: 0.00313056749291718\n#602 Loss: 0.0031296375673264265\n#603 Loss: 0.0031287011224776506\n#604 Loss: 0.0031277656089514494\n#605 Loss: 0.0031268373131752014\n#606 Loss: 0.00312589923851192\n#607 Loss: 0.0031249576713889837\n#608 Loss: 0.0031240256503224373\n#609 Loss: 0.003123080125078559\n#610 Loss: 0.0031221371609717607\n#611 Loss: 0.003121196059510112\n#612 Loss: 0.003120253561064601\n#613 Loss: 0.003119305009022355\n#614 Loss: 0.003118356456980109\n#615 Loss: 0.0031174139585345984\n#616 Loss: 0.003116464475169778\n#617 Loss: 0.003115519881248474\n#618 Loss: 0.0031145683024078608\n#619 Loss: 0.003113616956397891\n#620 Loss: 0.003112660488113761\n#621 Loss: 0.00311170588247478\n#622 Loss: 0.003110748715698719\n#623 Loss: 0.0031097966711968184\n#624 Loss: 0.0031088388059288263\n#625 Loss: 0.003107876516878605\n#626 Loss: 0.003106920048594475\n#627 Loss: 0.003105957293882966\n#628 Loss: 0.0031049950048327446\n#629 Loss: 0.0031040336471050978\n#630 Loss: 0.0031030664686113596\n#631 Loss: 0.0031021004542708397\n#632 Loss: 0.0031011318787932396\n#633 Loss: 0.0031001653987914324\n#634 Loss: 0.003099195659160614\n#635 Loss: 0.0030982240568846464\n#636 Loss: 0.0030972573440521955\n#637 Loss: 0.003096279688179493\n#638 Loss: 0.0030953073874115944\n#639 Loss: 
0.003094329498708248\n#640 Loss: 0.0030933513771742582\n#641 Loss: 0.003092374885454774\n#642 Loss: 0.0030913955997675657\n#643 Loss: 0.003090412588790059\n#644 Loss: 0.0030894335359334946\n#645 Loss: 0.00308845192193985\n#646 Loss: 0.003087469143792987\n#647 Loss: 0.00308648101054132\n#648 Loss: 0.0030854956712573767\n#649 Loss: 0.003084515919908881\n#650 Loss: 0.003083521733060479\n#651 Loss: 0.003082533134147525\n#652 Loss: 0.003081537550315261\n#653 Loss: 0.0030805484857410192\n#654 Loss: 0.003079561283811927\n#655 Loss: 0.003078560112044215\n#656 Loss: 0.0030775635968893766\n#657 Loss: 0.0030765633564442396\n#658 Loss: 0.003075564978644252\n#659 Loss: 0.0030745731201022863\n#660 Loss: 0.003073570318520069\n#661 Loss: 0.0030725656542927027\n#662 Loss: 0.003071562387049198\n#663 Loss: 0.0030705605167895555\n#664 Loss: 0.003069555386900902\n#665 Loss: 0.0030685439705848694\n#666 Loss: 0.0030675397720187902\n#667 Loss: 0.0030665274243801832\n#668 Loss: 0.0030655162408947945\n#669 Loss: 0.003064502263441682\n#670 Loss: 0.003063490381464362\n#671 Loss: 0.003062476636841893\n#672 Loss: 0.0030614605639129877\n#673 Loss: 0.0030604430940002203\n#674 Loss: 0.0030594307463616133\n#675 Loss: 0.0030584093183279037\n#676 Loss: 0.0030573883559554815\n#677 Loss: 0.003056370420381427\n#678 Loss: 0.00305534596554935\n#679 Loss: 0.003054314525797963\n#680 Loss: 0.0030532926321029663\n#681 Loss: 0.003052267013117671\n#682 Loss: 0.003051239298656583\n#683 Loss: 0.0030502071604132652\n#684 Loss: 0.003049177350476384\n#685 Loss: 0.0030481452122330666\n#686 Loss: 0.003047112375497818\n#687 Loss: 0.0030460793059319258\n#688 Loss: 0.003045039251446724\n#689 Loss: 0.0030440036207437515\n#690 Loss: 0.0030429682228714228\n#691 Loss: 0.003041934221982956\n#692 Loss: 0.0030408899765461683\n#693 Loss: 0.003039844334125519\n#694 Loss: 0.003038800088688731\n#695 Loss: 0.003037760965526104\n#696 Loss: 0.003036712994799018\n#697 Loss: 0.0030356652569025755\n#698 Loss: 0.003034620312973857\n#699 Loss: 0.003033566055819392\n#700 Loss: 0.003032522276043892\n#701 Loss: 0.0030314645264297724\n#702 Loss: 0.003030410734936595\n#703 Loss: 0.003029354615136981\n#704 Loss: 0.0030282975640147924\n#705 Loss: 0.003027241677045822\n#706 Loss: 0.003026185557246208\n#707 Loss: 0.0030251287389546633\n#708 Loss: 0.003024066099897027\n#709 Loss: 0.003023003926500678\n#710 Loss: 0.00302194245159626\n#711 Loss: 0.00302087957970798\n#712 Loss: 0.003019809490069747\n#713 Loss: 0.003018738003447652\n#714 Loss: 0.0030176721047610044\n#715 Loss: 0.003016604110598564\n#716 Loss: 0.003015534020960331\n#717 Loss: 0.0030144602060317993\n#718 Loss: 0.003013385459780693\n#719 Loss: 0.0030123144388198853\n#720 Loss: 0.0030112352687865496\n#721 Loss: 0.003010154701769352\n#722 Loss: 0.00300907832570374\n#723 Loss: 0.0030079965945333242\n#724 Loss: 0.0030069167260080576\n#725 Loss: 0.003005831502377987\n#726 Loss: 0.00300474651157856\n#727 Loss: 0.0030036605894565582\n#728 Loss: 0.0030025721061974764\n#729 Loss: 0.0030014861840754747\n#730 Loss: 0.0030003965366631746\n#731 Loss: 0.0029993082862347364\n#732 Loss: 0.0029982172418385744\n#733 Loss: 0.0029971261974424124\n#734 Loss: 0.002996026771143079\n#735 Loss: 0.0029949296731501818\n#736 Loss: 0.0029938339721411467\n#737 Loss: 0.0029927378054708242\n#738 Loss: 0.002991635352373123\n#739 Loss: 0.002990535693243146\n#740 Loss: 0.002989433705806732\n#741 Loss: 0.0029883303213864565\n#742 Loss: 0.002987225539982319\n#743 Loss: 0.002986123086884618\n#744 Loss: 0.002985007129609585\n#745 Loss: 
0.0029839053750038147\n#746 Loss: 0.002982793375849724\n#747 Loss: 0.0029816769529134035\n#748 Loss: 0.0029805738013237715\n#749 Loss: 0.0029794592410326004\n#750 Loss: 0.0029783404897898436\n#751 Loss: 0.0029772240668535233\n#752 Loss: 0.0029761074110865593\n#753 Loss: 0.0029749858658760786\n#754 Loss: 0.002973862923681736\n#755 Loss: 0.0029727399814873934\n#756 Loss: 0.0029716233257204294\n#757 Loss: 0.002970491535961628\n#758 Loss: 0.002969368128105998\n#759 Loss: 0.002968237968161702\n#760 Loss: 0.0029671108350157738\n#761 Loss: 0.002965979976579547\n#762 Loss: 0.002964847953990102\n#763 Loss: 0.0029637161642313004\n#764 Loss: 0.0029625834431499243\n#765 Loss: 0.0029614476952701807\n#766 Loss: 0.002960308687761426\n#767 Loss: 0.0029591761995106936\n#768 Loss: 0.0029580320697277784\n#769 Loss: 0.002956897020339966\n#770 Loss: 0.002955755917355418\n#771 Loss: 0.002954610623419285\n#772 Loss: 0.0029534606728702784\n#773 Loss: 0.002952314680442214\n#774 Loss: 0.00295117381028831\n#775 Loss: 0.002950020134449005\n#776 Loss: 0.0029488790314644575\n#777 Loss: 0.0029477218631654978\n#778 Loss: 0.002946570748463273\n#779 Loss: 0.0029454149771481752\n#780 Loss: 0.002944263396784663\n#781 Loss: 0.0029431015718728304\n#782 Loss: 0.002941948128864169\n#783 Loss: 0.002940784441307187\n#784 Loss: 0.002939627505838871\n#785 Loss: 0.0029384614899754524\n#786 Loss: 0.002937304088845849\n#787 Loss: 0.002936137607321143\n#788 Loss: 0.00293497066013515\n#789 Loss: 0.0029338039457798004\n#790 Loss: 0.0029326353687793016\n#791 Loss: 0.002931463299319148\n#792 Loss: 0.002930295653641224\n#793 Loss: 0.002929119626060128\n#794 Loss: 0.0029279489535838366\n#795 Loss: 0.0029267731588333845\n#796 Loss: 0.002925596199929714\n#797 Loss: 0.0029244180768728256\n#798 Loss: 0.0029232369270175695\n#799 Loss: 0.002922057406976819\n#800 Loss: 0.0029208732303231955\n#801 Loss: 0.002919693710282445\n#802 Loss: 0.0029185067396610975\n#803 Loss: 0.0029173188377171755\n#804 Loss: 0.002916131867095828\n#805 Loss: 0.0029149428009986877\n#806 Loss: 0.0029137544333934784\n#807 Loss: 0.002912563504651189\n#808 Loss: 0.0029113739728927612\n#809 Loss: 0.0029101762920618057\n#810 Loss: 0.002908983500674367\n#811 Loss: 0.0029077886138111353\n#812 Loss: 0.002906588139012456\n#813 Loss: 0.002905389992520213\n#814 Loss: 0.0029041890520602465\n#815 Loss: 0.0029029862489551306\n#816 Loss: 0.0029017834458500147\n#817 Loss: 0.0029005787800997496\n#818 Loss: 0.00289937318302691\n#819 Loss: 0.0028981640934944153\n#820 Loss: 0.002896954072639346\n#821 Loss: 0.0028957456815987825\n#822 Loss: 0.002894533798098564\n#823 Loss: 0.002893322380259633\n#824 Loss: 0.002892111660912633\n#825 Loss: 0.0028908916283398867\n#826 Loss: 0.002889677183702588\n#827 Loss: 0.0028884613420814276\n#828 Loss: 0.0028872406110167503\n#829 Loss: 0.0028860189486294985\n#830 Loss: 0.002884797053411603\n#831 Loss: 0.002883573994040489\n#832 Loss: 0.002882350469008088\n#833 Loss: 0.0028811234515160322\n#834 Loss: 0.0028798982966691256\n#835 Loss: 0.002878667786717415\n#836 Loss: 0.0028774356469511986\n#837 Loss: 0.0028762081637978554\n#838 Loss: 0.0028749722987413406\n#839 Loss: 0.002873735735192895\n#840 Loss: 0.0028725003357976675\n#841 Loss: 0.0028712654020637274\n#842 Loss: 0.002870030701160431\n#843 Loss: 0.002868788316845894\n#844 Loss: 0.0028675440698862076\n#845 Loss: 0.0028663061093539\n#846 Loss: 0.00286506419070065\n#847 Loss: 0.0028638169169425964\n#848 Loss: 0.002862572902813554\n#849 Loss: 0.002861322835087776\n#850 Loss: 0.002860073000192642\n#851 Loss: 
0.002858824096620083\n#852 Loss: 0.002857571467757225\n#853 Loss: 0.0028563232626765966\n#854 Loss: 0.0028550655115395784\n#855 Loss: 0.0028538068290799856\n#856 Loss: 0.0028525551315397024\n#857 Loss: 0.0028512950520962477\n#858 Loss: 0.0028500359039753675\n#859 Loss: 0.0028487779200077057\n#860 Loss: 0.002847515745088458\n#861 Loss: 0.002846249146386981\n"
},
{
"name": "stdout",
"output_type": "stream",
"text": "#862 Loss: 0.0028449876699596643\n#863 Loss: 0.002843722002580762\n#864 Loss: 0.0028424530755728483\n#865 Loss: 0.002841183915734291\n#866 Loss: 0.0028399156872183084\n#867 Loss: 0.0028386481571942568\n#868 Loss: 0.0028373694512993097\n#869 Loss: 0.002836097264662385\n#870 Loss: 0.0028348213527351618\n#871 Loss: 0.002833544509485364\n#872 Loss: 0.002832265803590417\n#873 Loss: 0.00283098965883255\n#874 Loss: 0.0028297044336795807\n#875 Loss: 0.0028284245636314154\n#876 Loss: 0.0028271395713090897\n#877 Loss: 0.002825857838615775\n#878 Loss: 0.0028245756402611732\n#879 Loss: 0.0028232894837856293\n#880 Loss: 0.0028219956438988447\n#881 Loss: 0.002820705994963646\n#882 Loss: 0.00281941588036716\n#883 Loss: 0.0028181213419884443\n#884 Loss: 0.0028168242424726486\n#885 Loss: 0.0028155359905213118\n#886 Loss: 0.0028142407536506653\n#887 Loss: 0.002812942722812295\n#888 Loss: 0.002811641665175557\n#889 Loss: 0.002810343401506543\n#890 Loss: 0.002809036523103714\n#891 Loss: 0.0028077370952814817\n#892 Loss: 0.0028064288198947906\n#893 Loss: 0.0028051265981048346\n#894 Loss: 0.0028038229793310165\n#895 Loss: 0.002802510978654027\n#896 Loss: 0.0028012089896947145\n#897 Loss: 0.0027998953592032194\n#898 Loss: 0.0027985796332359314\n#899 Loss: 0.0027972653042525053\n#900 Loss: 0.0027959533035755157\n#901 Loss: 0.002794637344777584\n#902 Loss: 0.0027933220844715834\n#903 Loss: 0.002792001934722066\n#904 Loss: 0.0027906829491257668\n#905 Loss: 0.0027893653605133295\n#906 Loss: 0.002788037760183215\n#907 Loss: 0.0027867192402482033\n#908 Loss: 0.002785389544442296\n#909 Loss: 0.0027840666007250547\n#910 Loss: 0.0027827343437820673\n#911 Loss: 0.002781406044960022\n#912 Loss: 0.0027800805401057005\n#913 Loss: 0.002778751077130437\n#914 Loss: 0.0027774174232035875\n#915 Loss: 0.002776080509647727\n#916 Loss: 0.002774745924398303\n#917 Loss: 0.0027734122704714537\n#918 Loss: 0.0027720772195607424\n#919 Loss: 0.002770734718069434\n#920 Loss: 0.002769396873190999\n#921 Loss: 0.0027680539060384035\n#922 Loss: 0.002766713500022888\n#923 Loss: 0.002765367738902569\n#924 Loss: 0.0027640226762741804\n#925 Loss: 0.00276267621666193\n#926 Loss: 0.0027613311540335417\n#927 Loss: 0.002759986324235797\n#928 Loss: 0.002758630318567157\n#929 Loss: 0.002757279435172677\n#930 Loss: 0.0027559271547943354\n#931 Loss: 0.0027545762713998556\n#932 Loss: 0.00275321863591671\n#933 Loss: 0.002751860534772277\n#934 Loss: 0.002750506391748786\n#935 Loss: 0.0027491494547575712\n#936 Loss: 0.002747784135863185\n#937 Loss: 0.0027464244049042463\n#938 Loss: 0.0027450602501630783\n#939 Loss: 0.0027436965610831976\n#940 Loss: 0.002742332173511386\n#941 Loss: 0.002740968018770218\n#942 Loss: 0.002739601070061326\n#943 Loss: 0.002738232957199216\n#944 Loss: 0.0027368590235710144\n#945 Loss: 0.0027354888152331114\n#946 Loss: 0.0027341179084032774\n#947 Loss: 0.0027327474672347307\n#948 Loss: 0.0027313686441630125\n#949 Loss: 0.0027299963403493166\n#950 Loss: 0.002728617750108242\n#951 Loss: 0.0027272403240203857\n#952 Loss: 0.0027258608024567366\n#953 Loss: 0.00272447825409472\n#954 Loss: 0.0027230980340391397\n#955 Loss: 0.0027217131573706865\n#956 Loss: 0.002720329212024808\n#957 Loss: 0.00271894340403378\n#958 Loss: 0.0027175631839782\n#959 Loss: 0.002716168062761426\n#960 Loss: 0.0027147841174155474\n#961 Loss: 0.002713392488658428\n#962 Loss: 0.0027120031882077456\n#963 Loss: 0.0027106080669909716\n#964 Loss: 0.0027092124801129103\n#965 Loss: 0.0027078192215412855\n#966 Loss: 0.0027064234018325806\n#967 Loss: 
0.0027050257194787264\n#968 Loss: 0.0027036250103265047\n#969 Loss: 0.002702222438529134\n#970 Loss: 0.0027008235920220613\n#971 Loss: 0.0026994228828698397\n#972 Loss: 0.0026980217080563307\n#973 Loss: 0.002696613548323512\n#974 Loss: 0.00269521027803421\n#975 Loss: 0.002693809801712632\n#976 Loss: 0.0026923955883830786\n#977 Loss: 0.0026909869629889727\n#978 Loss: 0.002689572051167488\n#979 Loss: 0.002688162960112095\n#980 Loss: 0.0026867559645324945\n#981 Loss: 0.002685340354219079\n#982 Loss: 0.002683921717107296\n#983 Loss: 0.0026825095992535353\n#984 Loss: 0.0026810932904481888\n#985 Loss: 0.0026796746533364058\n#986 Loss: 0.0026782590430229902\n#987 Loss: 0.0026768369134515524\n#988 Loss: 0.0026754147838801146\n#989 Loss: 0.0026739935856312513\n#990 Loss: 0.002672569826245308\n#991 Loss: 0.0026711428072303534\n#992 Loss: 0.002669712295755744\n#993 Loss: 0.002668286906555295\n#994 Loss: 0.0026668605860322714\n#995 Loss: 0.0026654365938156843\n#996 Loss: 0.002664001425728202\n#997 Loss: 0.002662571147084236\n#998 Loss: 0.0026611359789967537\n#999 Loss: 0.002659703604876995\nPredicted data based on trained weights: \nInput (scaled): \ntensor([0.5000, 1.0000])\nOutput: \ntensor([0.9393])\n"
}
]
},
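{
"metadata": {},
"cell_type": "markdown",
"source": "A note on `saveWeights`: `torch.save(model, \"NN\")` pickles the entire module object, which ties the saved file to this exact class definition. Because `W1` and `W2` are plain tensors rather than registered `nn.Parameter`s, the module's `state_dict()` is empty here. The sketch below (filenames are arbitrary) saves the weight tensors directly, and shows the usual `state_dict` pattern for modules whose weights are registered parameters."
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# a minimal sketch, not the gist's method; filenames are arbitrary\n# 1) W1/W2 are plain tensors (not nn.Parameters), so save and restore them directly:\ntorch.save({\"W1\": NN.W1, \"W2\": NN.W2}, \"nn_weights.pt\")\nweights = torch.load(\"nn_weights.pt\")\nNN.W1, NN.W2 = weights[\"W1\"], weights[\"W2\"]\n\n# 2) for modules with registered parameters (e.g. the AutogradNet sketch above),\n#    saving and loading the state_dict is the usual pattern:\n# torch.save(net.state_dict(), \"autograd_net.pt\")\n# net.load_state_dict(torch.load(\"autograd_net.pt\"))",
"execution_count": null,
"outputs": []
},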
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "",
"execution_count": null,
"outputs": []
}
],
"metadata": {
"gist": {
"id": "",
"data": {
"description": "Neural Network from Scratch with PyTorch",
"public": true
}
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3",
"language": "python"
},
"language_info": {
"name": "python",
"version": "3.6.8",
"mimetype": "text/x-python",
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"pygments_lexer": "ipython3",
"nbconvert_exporter": "python",
"file_extension": ".py"
}
},
"nbformat": 4,
"nbformat_minor": 2
} |