Skip to content

Instantly share code, notes, and snippets.

View Luxter77's full-sized avatar
💭
In the process of imploding

Lucas Daniel Velazquez M. Luxter77

💭
In the process of imploding
View GitHub Profile
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
from zabbix_utils import ZabbixAPI
import json
# Authentication token for the Zabbix API.  "FILLME" is a placeholder —
# replace it with a real token before running.
TOKEN = "FILLME"
# Base URL of the Zabbix frontend API endpoint.  Left empty here; must be
# filled in alongside TOKEN.
API_URL = ""
# Build the client against the configured endpoint and authenticate with
# the token (token-based login; no username/password).
api = ZabbixAPI(url=API_URL)
api.login(token=TOKEN)
# NOTE(review): `json` is imported but not used in the visible portion of
# this snippet — presumably used further down in the original gist.
#!/usr/bin/env python3
from genericpath import isfile
from pdf2image import convert_from_path, pdfinfo_from_path
import collections, sys, os
from tqdm.auto import tqdm, trange
from glob import glob
@Luxter77
Luxter77 / BREAKME.py
Created May 3, 2025 00:56
KEYME_BREAKME_WIKI_DUMPER_QUESTIONMARK
#!/usr/bin/python3
import requests as re
from tqdm.auto import tqdm
from pprint import pformat
from IPython import embed as IPy_embed
import json
@Luxter77
Luxter77 / Remove-EmptyFolder.ps1
Created February 21, 2025 23:28
Some functions I use on powershell
# Remove-EmptyFolder.ps1
# Delete every empty directory under $P (default: the current location),
# skipping any path whose name contains "tmp" (case-insensitive).
param([string]$P = (Get-Location).Path)

# Fixes vs. the original:
#  - `-Directory` restricts enumeration to folders (the original piped files
#    through the emptiness test as well);
#  - the emptiness check is performed once, not twice;
#  - the magic `-Depth 123456789` is dropped — `-Recurse` alone walks the
#    whole tree;
#  - the dead `$vacio` variable (which only ever captured the empty output
#    of ForEach-Object) is removed.
Get-ChildItem -Recurse -Directory -Path $P |
    Where-Object {
        Write-Progress " Checking $($_.FullName) ";
        # Empty means no children at all; never touch anything under *tmp*.
        ((Get-ChildItem $_.FullName).Count -eq 0) -and ($_.FullName.ToLower() -notlike "*tmp*")
    } |
    ForEach-Object {
        Write-Progress " Removing: $($_.FullName) ";
        Remove-Item -Force $_.FullName;
    };
from bs4 import BeautifulSoup
from tqdm.auto import tqdm
from pprint import pprint
import csv
# Load the incident-mail CSV export into memory.
# Fixes vs. the original:
#  - the file handle was leaked (bare `open(...)` inside the expression);
#    a context manager guarantees it is closed;
#  - `newline=""` is passed as the csv module documentation requires, so
#    embedded newlines inside quoted fields are parsed correctly.
with open("inconvenientes042022.csv", "r", encoding="utf8", newline="") as fh:
    mails = list(csv.reader(fh))
mails.pop(0)  # discard the header row
dataset = []  # accumulator — filled later in the script (not shown here)

APERTIUM

#!/usr/bin/env bash
# Set up the Apertium nightly APT repository and install build dependencies.
set -euo pipefail  # fail fast on any error, unset var, or broken pipe

# Repo: register the Apertium nightly package source.
# SECURITY NOTE: this pipes a remote script straight into a root shell —
# inspect https://apertium.projectjj.com/apt/install-nightly.sh first on
# any security-sensitive host.
curl -sS https://apertium.projectjj.com/apt/install-nightly.sh | sudo bash

# Install build dependencies.  The original list repeated build-essential,
# automake, subversion, pkg-config and libtool; duplicates removed here —
# the resulting installed set is identical.
sudo apt install libboost-dev libgoogle-perftools-dev libicu-dev cmake \
    subversion build-essential pkg-config gawk libxml2 libxml2-dev \
    libxml2-utils xsltproc flex automake libtool libpcre3-dev zlib1g-dev \
    locales git apertium-all-dev
@Luxter77
Luxter77 / attention.py
Created December 3, 2024 03:01 — forked from iamlemec/attention.py
Using KV cache with mixed causal/non-causal attention.
import torch
from transformers.models.roberta import RobertaConfig, RobertaModel, RobertaTokenizer
# load model and tokenizer
tokenizer = RobertaTokenizer.from_pretrained('FacebookAI/roberta-base')
# NOTE(review): `is_decoder=True` configures the RoBERTa checkpoint to run
# decoder-style (the gist title says "KV cache with mixed causal/non-causal
# attention") — confirm against the transformers docs for the pinned version.
# Requires a CUDA device; both model and inputs are moved to 'cuda'.
model = RobertaModel.from_pretrained('FacebookAI/roberta-base', is_decoder=True).to('cuda')
# tokenize inputs
text = 'hello world, this is a test'
# `return_tensors='pt'` yields PyTorch tensors (input_ids/attention_mask).
inputs = tokenizer(text, return_tensors='pt').to('cuda')
# NOTE(review): `RobertaConfig` is imported but unused in the visible lines —
# presumably used further down in the original gist.
#!/usr/bin/env python3
import json
import subprocess
from flask import Flask, request
# WSGI application object; route handlers (e.g. /api/translate, defined
# below in the original gist) are registered on it via @app.route.
app = Flask(__name__)
@app.route('/api/translate', methods=['POST'])
def translate():
@Luxter77
Luxter77 / funnel_layer.py
Last active February 27, 2024 13:14
Torch Sequence Funnel Layer
from torch import Tensor
import torch.nn as nn
class FunnelLayer(nn.Module):
def __init__(self, input_length: int, output_length: int, hidden_size: int, conv_size: int, num_heads, deconv: bool = False):
"""
Initialize the FunnelLayer.
Args:
input_length (int): The length of the input sequence.