Skip to content

Instantly share code, notes, and snippets.

@SWORDIntel
Last active April 25, 2025 17:40
Show Gist options
  • Save SWORDIntel/8c5ebf09d4e5c62059a9e8aed3b4b6a5 to your computer and use it in GitHub Desktop.
Sort Combos via RAM/NVME
#!/usr/bin/env python3
"""
sort_credentials.py
TUI-style splitter for EMAIL:PASS dumps.
• Lists all files in cwd and prompts you to pick one.
• Creates Sortedmails/ and, for each domain, .txt, .csv, .json.
• Supports resume via a hidden state file.
• Shows a tqdm progress bar so you know it’s alive.
• Regenerates JSON arrays at the end to keep them valid.
"""
import csv
import json
import logging
import os
import sys
from glob import glob

# you’ll need tqdm: `pip3 install tqdm`
from tqdm import tqdm
def select_file():
    """Interactively pick an input file from the current directory.

    Lists every regular file except this script itself and anything
    starting with ``Sortedmails`` (the output artifacts), then prompts
    for a 1-based index.

    Returns:
        str: the chosen filename.

    Exits:
        With status 1 when no candidate files exist or the entered
        choice is not a number in ``[1, len(candidates)]``.
    """
    current_script = os.path.basename(__file__)
    candidates = [
        f for f in os.listdir('.')
        if os.path.isfile(f)
        and f != current_script
        and not f.startswith('Sortedmails')
    ]
    if not candidates:
        print("❌ No files found to process in this directory.")
        sys.exit(1)
    print("\nSelect a file to process:\n")
    for i, fn in enumerate(candidates, 1):
        print(f" {i}. {fn}")
    choice = input(f"\nEnter number [1–{len(candidates)}]: ").strip()
    try:
        idx = int(choice) - 1
        # Reject 0 and out-of-range values explicitly: a negative index
        # would otherwise silently wrap around and select the wrong file.
        if not 0 <= idx < len(candidates):
            raise ValueError(choice)
        return candidates[idx]
    except ValueError:
        print("❌ Invalid selection.")
        sys.exit(1)
def _load_resume_point(state_file):
    """Return the saved line offset from a previous run, or 0.

    Only resumes when the user explicitly answers 'y'; an unreadable or
    corrupt state file falls back to 0 rather than aborting.
    """
    start = 0
    if os.path.exists(state_file):
        ans = input(f"Resume from last run? [y/N]: ").strip().lower()
        if ans == 'y':
            try:
                with open(state_file) as sf:
                    start = int(sf.read().strip())
            except (OSError, ValueError):
                start = 0
    return start


def _split_dump(infile, outdir, state_file, start, total_lines):
    """Split EMAIL:PASS lines of *infile* into per-domain .txt/.csv files.

    Files are opened and closed per line deliberately, so thousands of
    distinct domains cannot exhaust file descriptors. A checkpoint (the
    next line number to process) is written to *state_file* after every
    line so an interrupted run can resume.
    """
    with open(infile, 'r', encoding='utf-8', errors='ignore') as fin:
        # Consume already-processed lines BEFORE wrapping in tqdm:
        # wrapping first would count the skipped lines again on top of
        # initial=start and over-run the bar total.
        for _ in range(start):
            if next(fin, None) is None:
                return
        bar = tqdm(fin, total=total_lines, initial=start, desc="Sorting")
        for lineno, raw in enumerate(bar, start=start):
            line = raw.strip()
            if not line or ':' not in line:
                continue
            email, password = line.split(':', 1)
            if '@' not in email:
                continue
            domain = email.split('@', 1)[1]
            txt_path = os.path.join(outdir, f"{domain}.txt")
            csv_path = os.path.join(outdir, f"{domain}.csv")
            # Plain EMAIL:PASS mirror of the input line.
            with open(txt_path, 'a', encoding='utf-8') as txt_f:
                txt_f.write(f"{email}:{password}\n")
            # Proper CSV via the stdlib writer so commas/quotes in the
            # password are quoted instead of corrupting the row.
            write_header = (not os.path.exists(csv_path)
                            or os.path.getsize(csv_path) == 0)
            with open(csv_path, 'a', encoding='utf-8', newline='') as csv_f:
                writer = csv.writer(csv_f)
                if write_header:
                    writer.writerow(["email", "password"])
                writer.writerow([email, password])
            # Checkpoint: next line to process on resume.
            with open(state_file, 'w') as sf:
                sf.write(str(lineno + 1))


def _regenerate_json(outdir):
    """Rebuild one valid JSON array per domain from its CSV file."""
    for csv_path in tqdm(glob(os.path.join(outdir, "*.csv")), desc="JSON"):
        domain = os.path.splitext(os.path.basename(csv_path))[0]
        data = []
        with open(csv_path, 'r', encoding='utf-8', newline='') as cf:
            reader = csv.reader(cf)
            next(reader, None)  # skip header; tolerate an empty file
            for row in reader:
                if len(row) < 2:
                    continue  # skip malformed rows instead of crashing
                # Re-join any extra fields so legacy unquoted rows whose
                # password contained commas still round-trip intact.
                data.append({"email": row[0], "password": ",".join(row[1:])})
        json_path = os.path.join(outdir, f"{domain}.json")
        with open(json_path, 'w', encoding='utf-8') as jf:
            json.dump(data, jf, indent=2)


def main():
    """Interactive entry point: split an EMAIL:PASS dump per domain.

    Prompts for an input file, splits it into Sortedmails/<domain>.txt
    and .csv with resumable progress, then regenerates <domain>.json
    from each CSV so the JSON arrays are always valid.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s] %(levelname)s: %(message)s',
        datefmt='%H:%M:%S'
    )
    infile = select_file()
    outdir = 'Sortedmails'
    os.makedirs(outdir, exist_ok=True)
    # Hidden per-input state file remembers where the last run stopped.
    state_file = os.path.join(outdir, f".{infile}.state")
    logging.info("Counting total lines in %s…", infile)
    with open(infile, 'r', encoding='utf-8', errors='ignore') as f:
        total_lines = sum(1 for _ in f)
    start = _load_resume_point(state_file)
    logging.info("Processing %d lines, starting at line %d.", total_lines, start)
    try:
        _split_dump(infile, outdir, state_file, start, total_lines)
    except Exception as e:
        logging.error("Error during processing: %s", e)
        sys.exit(1)
    logging.info("Text/CSV split done → generating JSON outputs…")
    _regenerate_json(outdir)
    logging.info("All done! ✅")
    print(f"\nFiles written under ./{outdir}/\n")


if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment