bexgboost BexTuychiev

import optuna
import xgboost as xgb
from sklearn.metrics import mean_squared_error # or any other metric
from sklearn.model_selection import train_test_split
# Load the dataset
X, y = ... # load your own
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Define the objective function for Optuna
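# A minimal sketch of how the Optuna objective for XGBoost regression might be
# defined; the search space, metric, and trial count below are assumptions, not
# the original gist.
def objective(trial):
    params = {
        "n_estimators": trial.suggest_int("n_estimators", 100, 1000),
        "max_depth": trial.suggest_int("max_depth", 3, 10),
        "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
        "subsample": trial.suggest_float("subsample", 0.5, 1.0),
    }
    model = xgb.XGBRegressor(**params, random_state=42)
    model.fit(X_train, y_train)
    preds = model.predict(X_test)
    return mean_squared_error(y_test, preds)

study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=50)
print("Best parameters:", study.best_params)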
import pandas as pd
import numpy as np
import string
# Set the desired number of rows and columns
num_rows = 10_000_000
num_cols = 10
chunk_size = 100_000
# Define an empty DataFrame to store the chunks
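# A hedged sketch of how the chunked generation might continue; the column
# names and random values below are assumptions, not the original gist.
df = pd.DataFrame()
col_names = list(string.ascii_uppercase[:num_cols])
for _ in range(num_rows // chunk_size):
    # Generate one chunk of random floats and append it to the growing frame
    chunk = pd.DataFrame(np.random.rand(chunk_size, num_cols), columns=col_names)
    df = pd.concat([df, chunk], ignore_index=True)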
BexTuychiev / db_benchmark.py
Created March 27, 2023 17:07
Benchmark code that measures the computation time of the pandas PyArrow backend, Polars, and data.table
import time
import datatable as dt
import pandas as pd
import polars as pl
# Define a DataFrame to store the results
results_df = pd.DataFrame(
    columns=["Function", "Library", "Runtime (s)"]
)
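# A hedged sketch of how each library's runtime might be recorded into
# results_df; the benchmark() helper below is an assumption, not the original
# script.
def benchmark(func, name, library):
    """Time a single function call and append the result to results_df."""
    start = time.perf_counter()
    func()
    runtime = time.perf_counter() - start
    results_df.loc[len(results_df)] = [name, library, runtime]
    return runtime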
import os
import random
import time
import numpy as np
import pandas as pd
from faker import Faker
# Set seed for reproducibility
random.seed(42)
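# A hedged sketch of how a synthetic dataset might be built with Faker; the
# columns and row count below are assumptions, not the original gist.
np.random.seed(42)
Faker.seed(42)
fake = Faker()
n_rows = 1_000
df = pd.DataFrame({
    "name": [fake.name() for _ in range(n_rows)],
    "email": [fake.email() for _ in range(n_rows)],
    "joined": [fake.date_this_decade() for _ in range(n_rows)],
    "salary": np.random.normal(60_000, 15_000, n_rows).round(2),
})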
FROM nvidia/cuda:11.2.0-runtime-ubuntu20.04
# install utilities
RUN apt-get update && \
    apt-get install --no-install-recommends -y curl
ENV CONDA_AUTO_UPDATE_CONDA=false \
    PATH=/opt/miniconda/bin:$PATH
# install Miniconda into /opt/miniconda (prefix assumed from the PATH set above)
RUN curl -sLo ~/miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-py38_4.9.2-Linux-x86_64.sh \
    && chmod +x ~/miniconda.sh \
    && ~/miniconda.sh -b -p /opt/miniconda \
    && rm ~/miniconda.sh
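# A hedged sketch of how the image could continue; the package list below is an
# assumption, not the original Dockerfile.
RUN conda install -y numpy pandas scikit-learn && \
    conda clean -ya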
$ dvc remove dvclive.dvc models.dvc
$ rm -rf dvclive models
$ git add --all
$ git commit -m "Remove all experiments"
$ git tag "cnn32"
$ dvc add models dvclive
$ git commit -am "exp: CNN All layers, 0.992 val accuracy"
$ git add --all
$ git commit -m "Update train.py to use a DVCLive logger"
$ git tag cnn-all-layers
$ python src/train.py
# The snippet assumes a DVCLive logger object; a plausible setup (an
# assumption, not shown in the original) is:
from dvclive import Live

logger = Live()

# Set the parameters
params = {
    "image_width": 30,
    "image_height": 30,
    "batch_size": 32,
    "learning_rate": 0.001,
    "n_epochs": 10,
}
logger.log_params(params)
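# A hedged sketch of how per-epoch metrics might be logged with the same
# logger; the placeholder metric values below are assumptions, not the original
# train.py.
for epoch in range(params["n_epochs"]):
    train_loss = 1.0 / (epoch + 1)       # placeholder value
    val_accuracy = 0.90 + 0.005 * epoch  # placeholder value
    logger.log_metric("train_loss", train_loss)
    logger.log_metric("val_accuracy", val_accuracy)
    logger.next_step()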