Skip to content

Instantly share code, notes, and snippets.

View sadimanna's full-sized avatar
🌏
Submitted Ph.D. thesis a few days ago

Siladittya Manna sadimanna

🌏
Submitted Ph.D. thesis a few days ago
View GitHub Profile
@sadimanna
sadimanna / check_gpu_utilization.py
Created April 8, 2025 10:18
This script checks how much memory is being utilized on each GPU of every node.
# Standard-library imports for the GPU-utilization checker script,
# grouped and alphabetized per PEP 8.
import getpass
import os
import subprocess
import sys
import time
from collections import defaultdict
from typing import Dict, List, Union

# import paramiko  # alternative SSH library, currently disabled

# SSH connection timeout (presumably seconds — usage not visible in this snippet).
SSH_TIMEOUT = 10
# Quoted path to PuTTY's plink executable, used to run remote commands on Windows.
PLINK_PATH = r'"C:\Program Files\PuTTY\plink.EXE"'
@sadimanna
sadimanna / dummyce.py
Last active January 16, 2025 17:13
Dummy chess engine for communicating with a chess GUI using the UCI protocol.
import sys
import os
import re
if __name__ == '__main__':
while True:
line = sys.stdin.readline().strip() #input()
tokens = [ x.strip().lower() for x in re.split("\s+", line.strip()) ]
if tokens[0] == "uci":
sys.stdout.write("id name DummyCE\n")
@sadimanna
sadimanna / split_miniin100.py
Created January 16, 2025 05:00
Split miniImageNet data into train and test sets (can also be used for any dataset with class folders inside the directory structure).
import os
import shutil
import numpy as np
import random
if __name__ == '__main__':
TRAIN_FOLDER = './miniImageNet/train/'
TEST_FOLDER = './miniImageNet/test/'
TRAIN_SPLIT = 500
reg_loss = 0
for mod in self.model.modules():
if isinstance(mod, _BatchNorm):
if self.decay_bn:
for name, param in mod.named_parameters(recurse=False):
reg_loss = reg_loss + param.norm(2)
else:
for name, param in mod.named_parameters(recurse=False):
if not name.endswith("bias"):
reg_loss = reg_loss + param.norm(2)
@tf.keras.saving.register_keras_serializable(name="weighted_categorical_crossentropy")
def weighted_categorical_crossentropy(target, output, weights, axis=-1):
target = tf.convert_to_tensor(target)
output = tf.convert_to_tensor(output)
target.shape.assert_is_compatible_with(output.shape)
weights = tf.reshape(tf.convert_to_tensor(weights, dtype=target.dtype), (1,-1))
# Adjust the predictions so that the probability of
# each class for every sample adds up to 1
# This is needed to ensure that the cross entropy is
@tf.keras.saving.register_keras_serializable(name="weighted_binary_crossentropy")
def weighted_binary_crossentropy(target, output, weights):
target = tf.convert_to_tensor(target)
output = tf.convert_to_tensor(output)
weights = tf.convert_to_tensor(weights, dtype=target.dtype)
epsilon_ = tf.constant(tf.keras.backend.epsilon(), output.dtype.base_dtype)
output = tf.clip_by_value(output, epsilon_, 1.0 - epsilon_)
# Compute cross entropy from probabilities.
@tf.keras.saving.register_keras_serializable(name="WeightedCategoricalCrossentropy")
class WeightedCategoricalCrossentropy:
def __init__(
self,
weights,
label_smoothing=0.0,
axis=-1,
name="weighted_categorical_crossentropy",
fn = None,
):
@tf.keras.saving.register_keras_serializable(name="WeightedBinaryCrossentropy")
class WeightedBinaryCrossentropy:
def __init__(
self,
label_smoothing=0.0,
weights = [1.0, 1.0],
axis=-1,
name="weighted_binary_crossentropy",
fn = None,
):
@echo off
setlocal enabledelayedexpansion
REM Set the paths for the main image folder, train folder, val folder, and test folder
set "image_folder=C:\Path\to\ImageFolder"
set "train_folder=C:\Path\to\train"
set "val_folder=C:\Path\to\val"
set "test_folder=C:\Path\to\test"
REM Create the train, val, and test folders if they don't exist
REM Set the paths for Folder A, train folder, val folder, and test folder
set "mask_folder_path=%base_path%\annotations\trimaps"
set "train_mask_path=%base_path%\train_masks"
REM Create the target folder if it doesn't exist
mkdir "%train_mask_path%"
set "extension=.EXT"
REM Move label files from the label folder to the train folder based on file names
for %%F in ("%train_image_path%\*") do (
set "file_name=%%~nF"
move "%mask_folder_path%\!file_name!%extension%" "%train_mask_path%\"