Skip to content

Instantly share code, notes, and snippets.

"""
Script to update parameters and the optimizer on the fly.
It is not recommended to use this approach.
This script just shows the disadvantages of using this approach.
@author: ptrblck
"""
import torch
import torch.nn as nn
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils.data import Dataset
from torch.utils.data.dataloader import DataLoader
from __future__ import print_function
import argparse
import os
import random
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
from copy import deepcopy
import torch
import matplotlib
# Select the non-interactive Agg backend before any pyplot import, so plots
# can be rendered to files on a headless machine (no display required).
matplotlib.use("agg")
from torch.backends import cudnn
from apex import amp
import argparse
from torch import cuda
from torch import nn
from urllib import request
import torch
import torch.nn as nn
import torch.optim as optim
from apex import amp
class SiameseTower(nn.Module):
    """One feature-extraction tower of a Siamese network.

    NOTE(review): only the constructor header survives in this paste — the
    paste stripped all indentation (fixed here) and the layer definitions
    that presumably followed are missing; restore them from the original
    gist before use.

    Args:
        inplanes: number of input channels (default 3, i.e. RGB).
        planes: base channel width (unused in this truncated snippet).
        blocks: number of blocks (unused in this truncated snippet).
    """

    def __init__(self, inplanes=3, planes=32, blocks=3):
        # Parameters are accepted but not yet consumed in the visible code —
        # presumably they configure the conv stack in the full version.
        super(SiameseTower, self).__init__()
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
from apex import amp
from torchcontrib.optim import SWA
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from apex import amp
class MyModel(nn.Module):
import torch
import torch.nn as nn
import pandas as pd
import time
# Allow cuDNN to auto-tune: benchmark candidate convolution algorithms and
# cache the fastest one per input shape. Trades run-to-run determinism for
# speed — results may vary slightly between runs.
torch.backends.cudnn.deterministic = False
torch.backends.cudnn.benchmark = True
import torch
import torch.nn as nn
def print_types(input, bn, output):
print('input.type(): {}'.format(input.type()))
if bn.weight is not None:
print('bn.weight.type(): {}'.format(bn.weight.type()))
else:
import torch
import time
# Enable the cuDNN autotuner — beneficial here since input shapes are fixed.
torch.backends.cudnn.benchmark = True
# 1a)
# GEMM operands in half precision on the GPU. Dimensions are multiples of 8
# — presumably chosen to hit Tensor Core fast paths; TODO confirm intent.
# The matmul that consumes A and B is not visible in this excerpt.
I, J, K = 64, 1024, 1024
A = torch.randn(I, J, device='cuda', dtype=torch.half)
B = torch.randn(J, K, device='cuda', dtype=torch.half)