Yiling Cao (c2h2)

@c2h2
c2h2 / get_libgen_scimag.py
Created July 10, 2022 11:47
Get Library Genesis scimag repository torrents (Python 3)
import urllib.request
import requests
import lxml.html
import os
from urllib.parse import urlparse

# listing page for the scimag repository .torrent files
target = "http://libgen.rs/scimag/repository_torrent/"
allowed_exts = [".torrent"]
content = ""
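The preview cuts off here. A minimal sketch of how the rest might go, assuming the goal is to fetch the listing page and download every linked .torrent; the loop body and filename handling are my guesses, not the original gist:

content = requests.get(target).text
doc = lxml.html.fromstring(content)
for href in doc.xpath("//a/@href"):
    if os.path.splitext(urlparse(href).path)[1] not in allowed_exts:
        continue  # skip links that are not .torrent files
    fname = os.path.basename(urlparse(href).path)
    if os.path.exists(fname):
        continue  # already downloaded
    print("downloading", fname)
    urllib.request.urlretrieve(target + href, fname)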
@c2h2
c2h2 / goroutine_limiter_example.go
Created April 30, 2022 08:50
Limit the number of goroutines running concurrently.
package main
import (
"fmt"
"strconv"
"sync"
"time"
)
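The preview stops after the imports. The standard pattern with these packages is a buffered channel used as a counting semaphore plus a WaitGroup; a sketch of what the body might look like, not necessarily the gist's exact code:

func main() {
	limit := 4                        // at most 4 goroutines at once
	sem := make(chan struct{}, limit) // buffered channel as a counting semaphore
	var wg sync.WaitGroup
	for i := 0; i < 16; i++ {
		wg.Add(1)
		sem <- struct{}{} // blocks while `limit` goroutines hold slots
		go func(n int) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot
			fmt.Println("job " + strconv.Itoa(n))
			time.Sleep(500 * time.Millisecond)
		}(i)
	}
	wg.Wait()
}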
@c2h2
c2h2 / solve_wordly.py
Created February 6, 2022 17:34
Solve Wordle by filtering a word list against guess feedback.
import sys

f = open("allowed_words.txt", "r")
#f = open("possible_words.txt", "r")
# "allowed_words.txt" is from https://github.com/3b1b/videos/tree/master/_2022/wordle_data
content = f.read()
words = content.split("\n")

if len(sys.argv)!=4:
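# The preview ends inside this argument check. A guess at the rest, assuming
# the three arguments encode green positions, yellow letters, and gray letters
# (hypothetical names and semantics; not from the original gist):
    print("usage: solve_wordly.py <green_pattern> <yellow_letters> <gray_letters>")
    sys.exit(1)

green, yellow, gray = sys.argv[1], sys.argv[2], sys.argv[3]

def keep(w):
    # green: 5 chars, '.' = unknown position; yellow/gray: letters known present/absent
    if len(w) != 5:
        return False
    for i, c in enumerate(green):
        if c != "." and w[i] != c:
            return False
    if any(c not in w for c in yellow if c != "."):
        return False
    return not any(c in w for c in gray if c != ".")

for w in words:
    if keep(w):
        print(w)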
@c2h2
c2h2 / snapclient.desktop
Created February 3, 2022 12:55
Autostart snapclient via an XDG desktop entry.
[Desktop Entry]
Name=Snapclient
Type=Application
Exec=/usr/bin/snapclient -h 192.168.2.222
Terminal=false
#add to /etc/xdg/autostart/snapclient.desktop
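To use it, copy the file to the path named in the comment above; it can also be tested directly first (the server address comes from the Exec line):

sudo cp snapclient.desktop /etc/xdg/autostart/snapclient.desktop
/usr/bin/snapclient -h 192.168.2.222   # run once in a terminal to verify it connects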
@c2h2
c2h2 / process_sony_jpgs.py
Created October 1, 2021 15:31
Create smaller JPGs from the extra-large JPGs produced by my Sony Alpha A7R4.
import os
import glob
import subprocess

# glob all .JPG files in the current directory
path = '.'
files = glob.glob(path + '/*.JPG')

def create_resize(f):
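# The function body is cut off in the preview. A sketch of what it might do,
# shelling out to ImageMagick's convert; the resize factor, quality, and
# "_small" suffix are assumptions, not the original gist:
    out = os.path.splitext(f)[0] + "_small.jpg"  # half-size copy next to the original
    subprocess.run(["convert", f, "-resize", "50%", "-quality", "85", out], check=True)

for f in files:
    create_resize(f)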
@c2h2
c2h2 / dell_fan_spd_control.py
Created August 26, 2021 14:37
Manually control Dell server fan speed over IPMI.
import json
import subprocess
from datetime import datetime

ipmi_host = "192.168.2.145"
ipmi_pass = "xxxx"
ideal_temp = 30  # target temperature in degrees C
ts = str(datetime.now().strftime("%Y-%m-%d_%H:%M:%S"))  # timestamp for log lines
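The control logic is cut off. On Dell PowerEdge machines the usual trick is ipmitool's raw commands to switch the fans to manual mode and set a duty cycle; a sketch assuming that pattern (the 0x30 0x30 ... byte sequences are the commonly documented iDRAC codes, and the root username is an assumption):

def ipmi(*args):
    # run one ipmitool command against the iDRAC over lanplus
    cmd = ["ipmitool", "-I", "lanplus", "-H", ipmi_host,
           "-U", "root", "-P", ipmi_pass] + list(args)
    return subprocess.run(cmd, capture_output=True, text=True, check=True).stdout

ipmi("raw", "0x30", "0x30", "0x01", "0x00")          # disable automatic fan control
ipmi("raw", "0x30", "0x30", "0x02", "0xff", "0x14")  # set all fans to 20% (0x14 hex)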
@c2h2
c2h2 / vzdump_with_name.sh
Last active August 27, 2021 03:44
Proxmox vzdump named after the server, compressed with xz (parallel).
#!/bin/bash
dumpdir=/zfs_hdd4t/vmbackup2/
mkdir -p $dumpdir

vzdump_vma_name_xz(){
  # $1 = vmid, $2 = name
  ts=$(date +"%Y-%m-%d_%H%M")
  echo "dump $1 $2"
  cd $dumpdir
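  # The function is truncated after the cd. A sketch of the remaining steps,
  # assuming vzdump's --stdout mode piped into multi-threaded xz; the flags
  # and output name are my guesses, not the original gist:
  vzdump $1 --mode snapshot --stdout | xz -T0 > "${2}_${ts}.vma.xz"
}

vzdump_vma_name_xz 100 myserver   # example: back up VMID 100 as "myserver"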
@c2h2
c2h2 / wiki-crawler.md
Created December 8, 2020 10:27 — forked from jsaneu/wiki-crawler.md
MediaWiki Crawler

Download the whole site:

wget -nH --reject-regex 'Especial|Special|Ayuda|Help|action|printable|Archivo:' --recursive --page-requisites --html-extension --convert-links --restrict-file-names=windows --domains domain.com --no-parent http://domain.com/wiki

Remove external links with a regexp (keeps the anchor text):

Find: (<a[^>]*href="http)[^"]*("[^>]*>)([^"]*)(</a>)

Replace: $3
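The same find/replace can be applied in bulk with Python's re module; group 3 is the anchor text, which the substitution keeps:

import re

html = '<p>see <a class="ext" href="http://example.com/x">this page</a></p>'
pattern = r'(<a[^>]*href="http)[^"]*("[^>]*>)([^"]*)(</a>)'
print(re.sub(pattern, r'\3', html))  # -> <p>see this page</p>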

opkg update
opkg install ca-certificates libustream-openssl tmux byobu unzip vim iptables-mod-extra kmod-ipt-extra
wget -O v2ray-linux-64.zip https://github.com/v2ray/v2ray-core/releases/download/v4.23.1/v2ray-linux-64.zip
mkdir v2ray
cd v2ray
unzip ../v2ray-linux-64.zip
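These commands set up v2ray on an OpenWrt router (the opkg packages above add TLS support for wget plus tmux and iptables extras). A likely next step, assuming a config.json has already been prepared (it is not shown in this snippet):

./v2ray -config config.json   # v4.x binaries take -config; run inside tmux to keep it alive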
#solution: https://www.youtube.com/watch?v=5o-kdjv7FD0
#build the BFS tree and traverse the tree
class Step:
    def __init__(self):
        self.child_steps = []   # expanded successor states
        self.parent = None      # back-pointer for reconstructing the path
        self.step = 0           # depth of this node in the tree
        self.total_num_steps = 0
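The snippet stops after the node definition. A generic BFS skeleton over Step nodes to show how the fields fit together; the goal test and successor function are placeholder hooks, not the original gist:

from collections import deque

def bfs(root, is_goal, expand):
    # is_goal(step) -> bool; expand(step) -> list of fresh child Steps (hypothetical hooks)
    q = deque([root])
    while q:
        cur = q.popleft()
        if is_goal(cur):
            return cur  # follow .parent pointers to recover the path
        for child in expand(cur):
            child.parent = cur
            child.step = cur.step + 1
            cur.child_steps.append(child)
            q.append(child)
    return None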