Skip to content

Instantly share code, notes, and snippets.

View kashif's full-sized avatar

Kashif Rasul kashif

  • Berlin, Germany
  • 11:01 (UTC +01:00)
  • X @krasul
View GitHub Profile
from collections import defaultdict
import numpy as np
import pandas as pd
from rich.console import Console
from rich.table import Table
import torch
import torch.nn as nn
# coding=utf-8
# Copyright 2023 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
@kashif
kashif / keras_metrics.py
Created November 1, 2021 10:08
pt-keras-metrics
import tensorflow as tf
import torch
from torchmetrics import Metric
def tf2pt(x_tf=None):
    """Convert a TensorFlow value to its PyTorch counterpart.

    Only the ``None`` passthrough is visible in this excerpt: when
    ``x_tf`` is ``None`` (the default), ``None`` is returned.

    NOTE(review): this fragment is a truncated gist preview — the
    original presumably continues with the actual tf->torch conversion
    for non-None inputs. As excerpted, a non-None input falls off the
    end of the function and also returns ``None``; confirm against the
    full gist before relying on this for real tensors.

    The pasted original had its indentation flattened (a SyntaxError);
    this body restores valid Python formatting without changing the
    visible logic.
    """
    if x_tf is None:
        return None
@kashif
kashif / batch_SGDEN.py
Last active September 7, 2020 14:39
Batch SGD ElasticNet
from sklearn.datasets import load_boston
from sklearn.linear_model import (LinearRegression, Ridge, SGDRegressor,
Lasso, ElasticNetCV)
from sklearn.preprocessing import MinMaxScaler
import numpy as np
#from minepy import MINE
from sklearn.metrics import mean_squared_error
@kashif
kashif / batch_EN.py
Last active September 7, 2020 14:40
Batch ElasticNet
from sklearn.datasets import load_boston
from sklearn.linear_model import (LinearRegression, Ridge, LassoCV, ElasticNetCV,
ElasticNet, Lasso, RandomizedLasso)
from sklearn.feature_selection import RFE, f_regression
from sklearn.preprocessing import MinMaxScaler
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
import numpy as np
import pdb
#from minepy import MINE
@kashif
kashif / batch_embedded_GBRT.py
Last active September 7, 2020 14:40
Batch GBRT
from sklearn.datasets import load_boston
from sklearn.linear_model import (LinearRegression, Ridge,
Lasso, RandomizedLasso)
from sklearn.feature_selection import RFE, f_regression
from sklearn.preprocessing import MinMaxScaler
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
import numpy as np
#from minepy import MINE
from sklearn.metrics import mean_squared_error
@kashif
kashif / evonorm2d.py
Last active April 10, 2020 22:40
EvoNorm-S0 in PyTorch from https://arxiv.org/pdf/2004.02967.pdf
import torch
import torch.nn as nn
class EvoNorm2d(nn.Module):
__constants__ = ['num_features', 'eps', 'nonlinearity']
def __init__(self, num_features, eps=1e-5, nonlinearity=True):
super(EvoNorm2d, self).__init__()
@kashif
kashif / input_fn.py
Last active March 2, 2019 16:42
TensorFlow 1.x Estimator input pipeline function to read images organised in their class folders
def input_fn(file_pattern, labels,
image_size=(224,224),
shuffle=False,
batch_size=64,
num_epochs=None,
buffer_size=4096,
prefetch_buffer_size=None):
table = tf.contrib.lookup.index_table_from_tensor(mapping=tf.constant(labels))
num_classes = len(labels)
@kashif
kashif / acc_sgd.py
Created March 16, 2018 11:32
AccSGD optimizer for keras
class AccSGD(Optimizer):
"""AccSGD optimizer.
Arguments:
lr (float): learning rate
kappa (float, optional): ratio of long to short step (default: 1000)
xi (float, optional): statistical advantage parameter (default: 10)
smallConst (float, optional): any value <=1 (default: 0.7)
# References
@kashif
kashif / amsgrad.py
Last active May 13, 2019 14:21
Keras implementation of AMSGrad optimizer from "On the Convergence of Adam and Beyond" paper
class AMSgrad(Optimizer):
"""AMSGrad optimizer.
Default parameters follow those provided in the Adam paper.
# Arguments
lr: float >= 0. Learning rate.
beta_1: float, 0 < beta < 1. Generally close to 1.
beta_2: float, 0 < beta < 1. Generally close to 1.
epsilon: float >= 0. Fuzz factor.