>>> inspect(listofcollections)
'''
0
list `[1,2,3,...]` of len `1345`
1
np.ndarray `[[1,2],[3,2],...]` of size `(1345x2)`
np.ndarray([list `[2,3,4,...]` of len `7`, list `[2,2,1,...]` of len `15`])
torch.tensor `[[[0.0000,...]]]` of size `(1345x1x100x100)`
'''
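`inspect` here is the torch_snippets helper that prints a one-line summary per element of a nested collection. As an illustration only (this is not the library's actual implementation, and the name `summarize` is made up), a recursive summarizer along these lines could look like:

```python
import numpy as np, torch

def summarize(x, depth=0):
    """Print a one-line summary per element of a nested collection (illustrative sketch only)."""
    pad = '    ' * depth
    if isinstance(x, (list, tuple)):
        print(f'{pad}{type(x).__name__} `{x[:3]}...` of len `{len(x)}`')
        # recurse only when the elements are themselves collections
        if x and isinstance(x[0], (list, tuple, np.ndarray, torch.Tensor)):
            for i, item in enumerate(x):
                print(f'{pad}{i}')
                summarize(item, depth + 1)
    elif isinstance(x, np.ndarray):
        print(f'{pad}np.ndarray of shape `{x.shape}` and dtype `{x.dtype}`')
    elif isinstance(x, torch.Tensor):
        print(f'{pad}torch.tensor of size `{tuple(x.shape)}` and dtype `{x.dtype}`')
    else:
        print(f'{pad}{type(x).__name__}: {x}')
```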
__all__ = ['B','BB','C','choose','crop_from_bb','cv2', 'dumpdill','df2bbs', 'FName','glob','Glob',
           'line','loaddill','logger','extn', 'np', 'now','os','pd','parent','Path','pdb',
           'plt','puttext','randint', 'rand', 'read','rect','see','show','stem','tqdm','Tqdm']
import cv2, glob, numpy as np, pandas as pd, tqdm, os
import matplotlib#; matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.patheffects as path_effects
import pdb, datetime, dill
from pathlib import Path
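If the block above is saved as a module (say `loader.py`; the filename is just for illustration) and the rest of the file defines the helpers it lists, a wildcard import exposes exactly the names in `__all__`:

```python
from loader import *   # hypothetical module name for the block above

print(np.__name__, pd.__name__)   # 'numpy' 'pandas' -- re-exported because they appear in __all__
print('dill' in dir())            # False: dill is imported inside the module but not listed in __all__
```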
{
    "snippets" : [
        {
            "name" : "example",
            "code" : [
                "# This is an example snippet!",
                "# To create your own, add a new snippet block to the",
                "# snippets.json file in your jupyter nbextensions directory:",
                "# /nbextensions/snippets/snippets.json",
                "import this"
            ]
        }
    ]
}
git clone https://github.com/sizhky/classical-python-problems
git status
git add folder1 folder2 folder3 file1 file2 file*
git status
git commit -m "1.1.1"
git push origin master
# !jbck train.ipynb -> folder.of.train.ipynb/18:[email protected]
ip="train.ipynb"
!echo {ip}
!mkdir -p folder.of.{ip}
!jupyter nbconvert {ip}
!mv {stem(ip)}.html folder.of.{ip}/$(date +%H:%M@%d%b%y)
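For reference, a pure-Python equivalent of the cell above (an assumed re-implementation, not part of the original alias) could be:

```python
import subprocess, datetime
from pathlib import Path

def backup_notebook(ip='train.ipynb'):
    """Convert a notebook to HTML and file it under folder.of.<notebook>/<timestamp>."""
    out_dir = Path(f'folder.of.{ip}')
    out_dir.mkdir(parents=True, exist_ok=True)
    subprocess.run(['jupyter', 'nbconvert', '--to', 'html', ip], check=True)
    stamp = datetime.datetime.now().strftime('%H:%M@%d%b%y')
    Path(ip).with_suffix('.html').rename(out_dir / stamp)

backup_notebook('train.ipynb')
```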
i = 0
while True:
    try:
        i += 1
        if i == 10: i/0  # deliberately raise ZeroDivisionError to exit the loop
    except ZeroDivisionError:
        print('Go Free!!')
        break
import time
info = lambda report: '\t'.join([f'{k}: {v:.3f}' for k,v in report.items()])

def report_metrics(pos, **report):
    # `start` (training start time) and `n_epochs` are expected to be defined in the enclosing scope
    elapsed = time.time()-start
    end = report.pop('end','\n')
    elapsed = '\t({:.2f}s - {:.2f}s remaining)'.format(elapsed, ((n_epochs-pos)/pos)*elapsed)
    current_iteration = f'EPOCH: {pos:.3f}\t'
    print(current_iteration + info(report) + elapsed, end=end)
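A usage sketch, assuming `start` and `n_epochs` are defined by the surrounding training loop (both names come from the snippet above; the loss/accuracy numbers are dummies):

```python
n_epochs = 5
start = time.time()
for epoch in range(1, n_epochs + 1):
    time.sleep(0.1)                      # stand-in for the real training work of one epoch
    report_metrics(epoch, loss=1.0/epoch, accuracy=0.80 + 0.02*epoch, end='\r')
print()
```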
- Deep networks suffer from the vanishing-gradient problem: gradients are largest near the loss layer, and their magnitude shrinks as they are propagated backward through the earlier layers.
- Inception tackles this with auxiliary losses (see figure): the convolution outputs of blocks 4a and 4d branch out into their own mini classification heads, each using a standard softmax to predict the same classes as the main task.
- Hence, during training there is a strong gradient flow from "loss:0" that adjusts layer 4a (and the layers preceding it) more aggressively; likewise, layers 4b, 4c and 4d receive strong gradients from "loss:1", and layers from 4e onward sit closest to "loss:2". A sketch of how these losses are combined follows below.
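A hedged PyTorch sketch of the idea, using a toy two-stage network with one auxiliary head rather than GoogLeNet itself; the 0.3 weight follows the convention from the Inception paper, but treat the exact architecture and values as assumptions:

```python
import torch, torch.nn as nn

class TinyBackboneWithAux(nn.Module):
    """Toy stand-in for an Inception-style network with one auxiliary head (illustrative only)."""
    def __init__(self, n_classes=10):
        super().__init__()
        self.stem = nn.Sequential(nn.Conv2d(3, 16, 3, padding=1), nn.ReLU(), nn.AdaptiveAvgPool2d(8))
        self.mid  = nn.Sequential(nn.Conv2d(16, 32, 3, padding=1), nn.ReLU(), nn.AdaptiveAvgPool2d(4))
        self.aux_head  = nn.Sequential(nn.Flatten(), nn.Linear(16 * 8 * 8, n_classes))  # branches off the early layers
        self.main_head = nn.Sequential(nn.Flatten(), nn.Linear(32 * 4 * 4, n_classes))
    def forward(self, x):
        early = self.stem(x)
        late  = self.mid(early)
        return self.main_head(late), self.aux_head(early)

model, criterion = TinyBackboneWithAux(), nn.CrossEntropyLoss()
x, y = torch.randn(4, 3, 64, 64), torch.randint(0, 10, (4,))
logits, aux_logits = model(x)
# the auxiliary loss injects a strong gradient directly into the early layers
loss = criterion(logits, y) + 0.3 * criterion(aux_logits, y)
loss.backward()
```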
8x+4y+z = 96
3x+2y+4z = 53
9x+2y+z = 119
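Solving this 3x3 system with NumPy:

```python
import numpy as np

A = np.array([[8, 4, 1],
              [3, 2, 4],
              [9, 2, 1]], dtype=float)
b = np.array([96, 53, 119], dtype=float)

x, y, z = np.linalg.solve(A, b)
print(x, y, z)   # 13.666..., -4.666..., 5.333...  (i.e. 41/3, -14/3, 16/3)
```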