@mavillan · Last active May 16, 2023
soft_ordering_1dcnn.py
import torch
from torch import nn
import pytorch_lightning as pl
class SoftOrdering1DCNN(pl.LightningModule):

    def __init__(self, input_dim, output_dim, sign_size=32, cha_input=16, cha_hidden=32,
                 K=2, dropout_input=0.2, dropout_hidden=0.2, dropout_output=0.2):
        super().__init__()

        hidden_size = sign_size * cha_input
        sign_size1 = sign_size
        sign_size2 = sign_size // 2
        output_size = (sign_size // 4) * cha_hidden

        self.hidden_size = hidden_size
        self.cha_input = cha_input
        self.cha_hidden = cha_hidden
        self.K = K
        self.sign_size1 = sign_size1
        self.sign_size2 = sign_size2
        self.output_size = output_size
        self.dropout_input = dropout_input
        self.dropout_hidden = dropout_hidden
        self.dropout_output = dropout_output

        # dense layer: learns a "soft ordering" of the input features before
        # they are reshaped into channels for the 1D convolutions
        self.batch_norm1 = nn.BatchNorm1d(input_dim)
        self.dropout1 = nn.Dropout(dropout_input)
        dense1 = nn.Linear(input_dim, hidden_size, bias=False)
        self.dense1 = nn.utils.weight_norm(dense1)

        # 1st conv layer: depthwise (groups=cha_input), expands channels by K
        self.batch_norm_c1 = nn.BatchNorm1d(cha_input)
        conv1 = nn.Conv1d(
            cha_input,
            cha_input * K,
            kernel_size=5,
            stride=1,
            padding=2,
            groups=cha_input,
            bias=False)
        self.conv1 = nn.utils.weight_norm(conv1, dim=None)
        self.ave_po_c1 = nn.AdaptiveAvgPool1d(output_size=sign_size2)

        # 2nd conv layer
        self.batch_norm_c2 = nn.BatchNorm1d(cha_input * K)
        self.dropout_c2 = nn.Dropout(dropout_hidden)
        conv2 = nn.Conv1d(
            cha_input * K,
            cha_hidden,
            kernel_size=3,
            stride=1,
            padding=1,
            bias=False)
        self.conv2 = nn.utils.weight_norm(conv2, dim=None)

        # 3rd conv layer
        self.batch_norm_c3 = nn.BatchNorm1d(cha_hidden)
        self.dropout_c3 = nn.Dropout(dropout_hidden)
        conv3 = nn.Conv1d(
            cha_hidden,
            cha_hidden,
            kernel_size=3,
            stride=1,
            padding=1,
            bias=False)
        self.conv3 = nn.utils.weight_norm(conv3, dim=None)

        # 4th conv layer: depthwise, feeds the residual connection
        self.batch_norm_c4 = nn.BatchNorm1d(cha_hidden)
        conv4 = nn.Conv1d(
            cha_hidden,
            cha_hidden,
            kernel_size=5,
            stride=1,
            padding=2,
            groups=cha_hidden,
            bias=False)
        self.conv4 = nn.utils.weight_norm(conv4, dim=None)
        self.avg_po_c4 = nn.AvgPool1d(kernel_size=4, stride=2, padding=1)

        self.flt = nn.Flatten()
        self.batch_norm2 = nn.BatchNorm1d(output_size)
        self.dropout2 = nn.Dropout(dropout_output)
        dense2 = nn.Linear(output_size, output_dim, bias=False)
        self.dense2 = nn.utils.weight_norm(dense2)

        self.loss = nn.BCEWithLogitsLoss()
    def forward(self, x):
        x = self.batch_norm1(x)
        x = self.dropout1(x)
        x = nn.functional.celu(self.dense1(x))

        # reshape the dense output into (batch, channels, length) for the convs
        x = x.reshape(x.shape[0], self.cha_input, self.sign_size1)

        x = self.batch_norm_c1(x)
        x = nn.functional.relu(self.conv1(x))
        x = self.ave_po_c1(x)

        x = self.batch_norm_c2(x)
        x = self.dropout_c2(x)
        x = nn.functional.relu(self.conv2(x))
        x_s = x  # saved for the residual connection

        x = self.batch_norm_c3(x)
        x = self.dropout_c3(x)
        x = nn.functional.relu(self.conv3(x))

        x = self.batch_norm_c4(x)
        x = self.conv4(x)
        x = x + x_s  # residual add
        x = nn.functional.relu(x)
        x = self.avg_po_c4(x)

        x = self.flt(x)
        x = self.batch_norm2(x)
        x = self.dropout2(x)
        x = self.dense2(x)
        return x
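
As written, the module defines only the network and the loss; to train it with Lightning's Trainer you also need a training_step and configure_optimizers. Below is a minimal sketch of one way to wire that up on synthetic data; the subclass name, the Adam optimizer, the learning rate, and the data shapes are illustrative assumptions, not part of the original gist.

import torch
import pytorch_lightning as pl
from torch.utils.data import DataLoader, TensorDataset

# Hypothetical subclass adding the two hooks Trainer.fit() requires;
# the optimizer choice and lr=1e-3 are assumptions, not the author's.
class TrainableSoftOrdering1DCNN(SoftOrdering1DCNN):
    def training_step(self, batch, batch_idx):
        x, y = batch
        logits = self(x)
        loss = self.loss(logits, y)  # nn.BCEWithLogitsLoss from __init__
        self.log("train_loss", loss)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)

# Synthetic stand-in data: 256 samples, 100 features, binary targets.
X = torch.randn(256, 100)
y = torch.randint(0, 2, (256, 1)).float()
loader = DataLoader(TensorDataset(X, y), batch_size=32, shuffle=True)

model = TrainableSoftOrdering1DCNN(input_dim=100, output_dim=1)
trainer = pl.Trainer(max_epochs=5)
trainer.fit(model, loader)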
@fnasimi commented Jul 25, 2021

Hi, thanks for this useful code. I have a question: how can I adapt this model for a multiclass (4-class) classification problem?

@mavillan (Author) commented Aug 2, 2021

Hi @fnasimi, for a 4-class classification problem you should set output_dim=4, so that the output of the model is a vector of 4 logits, where entry i corresponds to the probability of belonging to class i (i = 1, 2, 3, 4).

Then for the loss function you should use BCEWithLogitsLoss for multiclass: https://discuss.pytorch.org/t/bceloss-for-multiclass-problem/36675/6
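
A minimal sketch of that setup (input_dim=100 and the batch below are made-up examples). Note that with BCEWithLogitsLoss the targets must be one-hot encoded floats; nn.CrossEntropyLoss with integer class labels is the more conventional choice for single-label multiclass problems.

import torch
from torch import nn

# Hypothetical 4-class usage; input_dim=100 and the data are illustrative.
model = SoftOrdering1DCNN(input_dim=100, output_dim=4)

x = torch.randn(8, 100)                 # batch of 8 samples
labels = torch.randint(0, 4, (8,))      # integer class ids in {0, 1, 2, 3}
targets = nn.functional.one_hot(labels, num_classes=4).float()  # one-hot for BCE

logits = model(x)                       # shape (8, 4)
loss = model.loss(logits, targets)      # BCEWithLogitsLoss defined in __init__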

@mohiburnabil commented

What should I do next after this code? Can you provide a sample of training the model on a dataset?
