@discountry
Created October 14, 2025 08:46
#!/usr/bin/env bash
set -euo pipefail
# ============================================================
# Binance Klines Evidence Collector (Judicial-grade)
# - Supports symbol / interval / startTime / endTime / timeZone / limit
# - Automatic pagination (limit <= 1000)
# - Saves: response headers, TLS certificate, DNS, traceroute, (optional) tcpdump
# - Computes SHA256 (per page + merged), writes metadata.json, (optional) OpenTimestamps
# - Initializes git in-place in the evidence directory (snapshot); .git is excluded when packaging
#
# Defaults: 1h klines for ATOMUSDT, UTC 2025-10-10 ~ 2025-10-12
# (END is set to 2025-10-13T00:00:00Z so the 10/12 23:00 candle is covered)
#
# Suggested dependencies:
# - Required: curl, tar
# - Strongly recommended: openssl (TLS evidence), jq (stable merge/sort), dig/nslookup, traceroute/tracepath
# - Optional: tcpdump (packet-capture evidence), ots (OpenTimestamps), git (local snapshot)
# ============================================================
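# Example: with the defaults above, the whole range fits in one request window, so the
# script issues a single GET equivalent to:
#   https://api.binance.com/api/v3/klines?symbol=ATOMUSDT&interval=1h&limit=1000&timeZone=0&startTime=1760054400000&endTime=1760313600000
# where 1760054400000 = 2025-10-10T00:00:00Z and 1760313600000 = 2025-10-13T00:00:00Z, in ms.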
API_HOST="api.binance.com"
BASE_URL="https://${API_HOST}/api/v3/klines"
# -------- Defaults (adjust as needed) --------
SYMBOL="ATOMUSDT"
INTERVAL="1h"
START_ARG="2025-10-10T00:00:00Z"
END_ARG="2025-10-13T00:00:00Z" # 覆盖到 10/12 23:00 那根K线
TIMEZONE="0"
LIMIT="1000" # Binance: default 500, max 1000
OUTDIR=""
ENABLE_TCPDUMP=false
TCPDUMP_IFACE=""
OTS_ENABLE=true
# -------------------------------------
# ---- sha256 (cross-platform) ----
if command -v sha256sum >/dev/null 2>&1; then
SHA256_CMD="sha256sum"
elif command -v shasum >/dev/null 2>&1; then
SHA256_CMD="shasum -a 256"
else
echo "ERROR: no sha256sum or shasum found"; exit 1
fi
usage() {
cat <<EOF
Usage:
$0 [--symbol SYMBOL] [--interval INTERVAL] [--start TS] [--end TS] [--timezone Z]
[--limit N<=1000] [--outdir DIR] [--tcpdump] [--tcpdump-iface IFACE] [--no-ots]
Params:
--symbol STRING e.g. BTCUSDT
--interval ENUM 1s|1m|3m|5m|15m|30m|1h|2h|4h|6h|8h|12h|1d|3d|1w|1M
--start TIME ISO8601 / unix sec / unix ms
--end TIME ISO8601 / unix sec / unix ms
--timezone STRING Default 0 (UTC)
--limit INT 1..1000, default 1000
--outdir DIR default ./evidence_<UTC>_<SYMBOL>_<INTERVAL>
--tcpdump enable 20s capture (root may be required)
--tcpdump-iface IFACE eth0 / en0 ...
--no-ots disable OpenTimestamps step
Example:
$0 --symbol ETHUSDT --interval 1m --start "2025-10-14T00:00:00Z" --end "2025-10-14T02:00:00Z"
EOF
}
# ---- Parse arguments ----
while [[ $# -gt 0 ]]; do
case "$1" in
--symbol) SYMBOL="$2"; shift 2;;
--interval) INTERVAL="$2"; shift 2;;
--start) START_ARG="$2"; shift 2;;
--end) END_ARG="$2"; shift 2;;
--timezone) TIMEZONE="$2"; shift 2;;
--limit) LIMIT="$2"; shift 2;;
--outdir) OUTDIR="$2"; shift 2;;
--tcpdump) ENABLE_TCPDUMP=true; shift;;
--tcpdump-iface) TCPDUMP_IFACE="$2"; shift 2;;
--no-ots) OTS_ENABLE=false; shift;;
--help|-h) usage; exit 0;;
*) echo "Unknown arg: $1"; usage; exit 1;;
esac
done
# ---- Validate interval ----
VALID_INTV="1s 1m 3m 5m 15m 30m 1h 2h 4h 6h 8h 12h 1d 3d 1w 1M"
if ! echo " $VALID_INTV " | grep -q " ${INTERVAL} "; then
echo "ERROR: invalid interval: ${INTERVAL}"
echo "Supported: ${VALID_INTV}"
exit 1
fi
# ---- Output directory ----
if [[ -z "${OUTDIR:-}" ]]; then
OUTDIR="./evidence_$(date -u +"%Y%m%dT%H%M%SZ")_${SYMBOL}_${INTERVAL}"
fi
mkdir -p "$OUTDIR"
LOG="$OUTDIR/run.log"
# ---- Tool paths ----
CURL=$(command -v curl || true)
[[ -n "$CURL" ]] || { echo "ERROR: curl is required"; exit 1; }
OPENSSL=$(command -v openssl || true)
DIG=$(command -v dig || command -v nslookup || true)
TRACEROUTE=$(command -v traceroute || command -v tracepath || true)
TCPDUMP=$(command -v tcpdump || true)
JQ=$(command -v jq || true)
OTS=$(command -v ots || true)
GIT=$(command -v git || true)
echo "Start UTC: $(date -u +"%Y-%m-%dT%H:%M:%SZ")" | tee -a "$LOG"
echo "Symbol=${SYMBOL} Interval=${INTERVAL} Limit=${LIMIT} TZ=${TIMEZONE}" | tee -a "$LOG"
# ---- interval -> milliseconds ----
interval_to_ms() {
case "$1" in
1s) echo $((1*1000));;
1m) echo $((60*1000));;
3m) echo $((3*60*1000));;
5m) echo $((5*60*1000));;
15m) echo $((15*60*1000));;
30m) echo $((30*60*1000));;
1h) echo $((60*60*1000));;
2h) echo $((2*60*60*1000));;
4h) echo $((4*60*60*1000));;
6h) echo $((6*60*60*1000));;
8h) echo $((8*60*60*1000));;
12h) echo $((12*60*60*1000));;
1d) echo $((24*60*60*1000));;
3d) echo $((3*24*60*60*1000));;
1w) echo $((7*24*60*60*1000));;
1M) echo $((30*24*60*60*1000));; # approx
*) echo "0";;
esac
}
INTV_MS=$(interval_to_ms "$INTERVAL")
if [[ "$INTV_MS" -le 0 ]]; then echo "interval parse failed"; exit 1; fi
# ---- Parse time to milliseconds (accepts ISO8601 / unix seconds / unix milliseconds) ----
to_ms() {
local input="$1"
if [[ -z "$input" ]]; then echo ""; return 0; fi
if [[ "$input" =~ ^[0-9]{13}$ ]]; then echo "$input"; return 0; fi
if [[ "$input" =~ ^[0-9]{10}$ ]]; then echo "$((input*1000))"; return 0; fi
if date -u -d "$input" +%s >/dev/null 2>&1; then
local s; s=$(date -u -d "$input" +%s)
echo "$((s*1000))"; return 0
fi
if date -u -j -f "%Y-%m-%dT%H:%M:%SZ" "$input" +%s >/dev/null 2>&1; then
local s; s=$(date -u -j -f "%Y-%m-%dT%H:%M:%SZ" "$input" +%s)
echo "$((s*1000))"; return 0
fi
echo "ERROR" >&2; return 1
}
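# Examples: all of the following spellings of the same instant normalize to the same value:
#   to_ms "2025-10-10T00:00:00Z"  -> 1760054400000
#   to_ms "1760054400"            -> 1760054400000   (unix seconds)
#   to_ms "1760054400000"         -> 1760054400000   (unix milliseconds)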
START_MS=""; END_MS=""
if [[ -n "$START_ARG" ]]; then
START_MS=$(to_ms "$START_ARG") || { echo "Invalid --start: $START_ARG"; exit 1; }
fi
if [[ -n "$END_ARG" ]]; then
END_MS=$(to_ms "$END_ARG") || { echo "Invalid --end: $END_ARG"; exit 1; }
fi
if [[ -z "$END_MS" ]]; then
NOW_MS=$(( $(date -u +%s) * 1000 ))
END_MS=$(( NOW_MS - INTV_MS ))
fi
if [[ -n "$START_MS" ]] && [[ "$START_MS" -ge "$END_MS" ]]; then
echo "ERROR: start >= end"; exit 1
fi
echo "Time range (ms): start=${START_MS:-<unspecified>} end=${END_MS}" | tee -a "$LOG"
# ---- Pre-request evidence: DNS / TLS / traceroute ----
HOSTNAME="$API_HOST"
DNS_FILE="$OUTDIR/dns.txt"
if command -v dig >/dev/null 2>&1; then
{
echo "=== dig +short ${HOSTNAME} ==="
dig +short "$HOSTNAME"
echo
echo "=== dig ANY ${HOSTNAME} (answer) ==="
dig ANY "$HOSTNAME" +noall +answer
} > "$DNS_FILE" 2>&1 || true
elif command -v nslookup >/dev/null 2>&1; then
nslookup "$HOSTNAME" > "$DNS_FILE" 2>&1 || true
else
echo "No dig/nslookup" > "$DNS_FILE"
fi
echo "Saved DNS -> $DNS_FILE" | tee -a "$LOG"
CERT_PEM="$OUTDIR/tls_cert.pem"
CERT_FP="$OUTDIR/tls_cert_fingerprint.txt"
CERT_INFO="$OUTDIR/tls_cert_details.txt"
if command -v openssl >/dev/null 2>&1; then
echo | openssl s_client -connect "${HOSTNAME}:443" -servername "$HOSTNAME" 2>/dev/null \
| openssl x509 -outform PEM > "$CERT_PEM" || true
if [[ -s "$CERT_PEM" ]]; then
openssl x509 -in "$CERT_PEM" -noout -fingerprint -sha256 > "$CERT_FP" || true
openssl x509 -in "$CERT_PEM" -noout -text > "$CERT_INFO" || true
echo "Saved TLS cert & fingerprint" | tee -a "$LOG"
else
echo "openssl present but failed to fetch cert (network/MTLS/CF?)." | tee -a "$LOG"
fi
else
echo "No openssl; skip TLS evidence" | tee -a "$LOG"
fi
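# To re-check the recorded certificate later (a sketch; assumes openssl, direct network
# access, and that the server has not rotated its certificate in the meantime):
#   echo | openssl s_client -connect api.binance.com:443 -servername api.binance.com 2>/dev/null \
#     | openssl x509 -noout -fingerprint -sha256
# and compare the output with tls_cert_fingerprint.txt in the evidence directory.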
TRACE_FILE="$OUTDIR/traceroute.txt"
if command -v traceroute >/dev/null 2>&1; then
traceroute "$HOSTNAME" > "$TRACE_FILE" 2>&1 || true
elif command -v tracepath >/dev/null 2>&1; then
tracepath "$HOSTNAME" > "$TRACE_FILE" 2>&1 || true
else
echo "No traceroute/tracepath" > "$TRACE_FILE"
fi
echo "Saved traceroute -> $TRACE_FILE" | tee -a "$LOG"
# Optional tcpdump
PCAP="$OUTDIR/tcpdump.pcap"
if [[ "$ENABLE_TCPDUMP" == true ]]; then
if command -v tcpdump >/dev/null 2>&1; then
echo "Capturing tcpdump 20s -> $PCAP (may need sudo)" | tee -a "$LOG"
if [[ -n "$TCPDUMP_IFACE" ]]; then
sudo tcpdump -i "$TCPDUMP_IFACE" -w "$PCAP" host "$HOSTNAME" -G 20 -W 1 >/dev/null 2>&1 || true
else
sudo tcpdump -w "$PCAP" host "$HOSTNAME" -G 20 -W 1 >/dev/null 2>&1 || true
fi
else
echo "tcpdump not found; skip" | tee -a "$LOG"
fi
fi
# ---- Fetch loop (paginated by time window) ----
PAGES_DIR="$OUTDIR/pages"
mkdir -p "$PAGES_DIR"
PAGE=1
TOTAL_ROWS=0
base_query() {
local q="symbol=${SYMBOL}&interval=${INTERVAL}&limit=${LIMIT}"
[[ -n "$TIMEZONE" ]] && q="${q}&timeZone=${TIMEZONE}"
echo "$q"
}
fetch_page() {
local pstart_ms="$1"
local pend_ms="$2"
local q="$(base_query)"
[[ -n "$pstart_ms" ]] && q="${q}&startTime=${pstart_ms}"
[[ -n "$pend_ms" ]] && q="${q}&endTime=${pend_ms}"
local url="${BASE_URL}?${q}"
local body="$PAGES_DIR/page_${PAGE}.json"
local headers="$PAGES_DIR/page_${PAGE}.headers"
local meta="$PAGES_DIR/page_${PAGE}.curlmeta"
local verbose="$PAGES_DIR/page_${PAGE}.curlverbose"
"$CURL" -s -D "$headers" -o "$body" \
-w "REMOTE_IP:%{remote_ip}\nHTTP_CODE:%{http_code}\nTIME_STARTTRANSFER:%{time_starttransfer}\n" \
"$url" > "$meta" 2> "$verbose"
echo "$body|$headers|$meta|$verbose"
}
next_from_last() {
local json_file="$1"
if [[ ! -s "$json_file" ]]; then echo ""; return 0; fi
if [[ -n "$JQ" ]]; then
jq -r 'if length>0 then .[-1][0] else empty end' "$json_file" 2>/dev/null || true
else
# Fallback without jq: the only 13-digit integers in a page are the open/close times,
# alternating open,close,...; the second-to-last one is therefore the last row's open time.
grep -oE '[0-9]{13}' "$json_file" | tail -n 2 | head -n 1 || true
fi
}
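# Note: each row returned by /api/v3/klines is a 12-element array whose relevant fields here
# are [0] open time (ms) and [6] close time (ms); jq's '.[-1][0]' above therefore yields the
# open time of the last kline on the page, and the grep fallback relies on open/close times
# being the only 13-digit integers in the body.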
CUR_START="${START_MS:-}"
if [[ -z "$CUR_START" ]]; then
CUR_END="$END_MS"
MAXP=2000
while [[ $PAGE -le $MAXP ]]; do
local_start=$(( CUR_END - INTV_MS*LIMIT ))
[[ "$local_start" -lt 0 ]] && local_start=0
out=$(fetch_page "$local_start" "$CUR_END")
IFS='|' read -r body headers meta verbose <<< "$out"
if [[ -n "$JQ" ]]; then rows=$(jq 'length' "$body" 2>/dev/null || echo 0)
else rows=$(grep -o '\[' "$body" | wc -l | awk '{print $1}')
fi
echo "Page $PAGE rows=$rows end=$CUR_END" | tee -a "$LOG"
[[ "$rows" -eq 0 ]] && break
TOTAL_ROWS=$((TOTAL_ROWS + rows))
PAGE=$((PAGE+1))
CUR_END=$(( local_start - 1 ))
[[ "$CUR_END" -le 0 ]] && break
if [[ "$CUR_END" -lt $(( ( $(date -u +%s) - 10*365*86400 ) * 1000 )) ]]; then break; fi
done
else
CUR_END="$END_MS"
while [[ "$CUR_START" -lt "$CUR_END" ]]; do
window_end=$(( CUR_START + INTV_MS*LIMIT - 1 ))
[[ "$window_end" -gt "$CUR_END" ]] && window_end="$CUR_END"
out=$(fetch_page "$CUR_START" "$window_end")
IFS='|' read -r body headers meta verbose <<< "$out"
if [[ -n "$JQ" ]]; then rows=$(jq 'length' "$body" 2>/dev/null || echo 0)
else rows=$(grep -o '\[' "$body" | wc -l | awk '{print $1}')
fi
echo "Page $PAGE rows=$rows range=[${CUR_START}, ${window_end}]" | tee -a "$LOG"
[[ "$rows" -eq 0 ]] && break
TOTAL_ROWS=$((TOTAL_ROWS + rows))
PAGE=$((PAGE+1))
last_ot=$(next_from_last "$body")
if [[ -n "$last_ot" ]]; then
CUR_START=$(( last_ot + INTV_MS ))
else
CUR_START=$(( CUR_START + INTV_MS*LIMIT ))
fi
done
fi
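# Window sizing: each request window spans at most INTV_MS*LIMIT milliseconds. With 1h*1000
# that is 1000 hours (~41.7 days) per page, so the default 72-hour range is fetched in a
# single page; the same range at interval 1m (4320 candles) would take ceil(4320/1000) = 5 pages.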
echo "Total rows fetched: $TOTAL_ROWS" | tee -a "$LOG"
# ---- Merge and sort ----
MERGED_SORTED="$OUTDIR/klines_sorted.json"
if [[ -n "$JQ" ]]; then
jq -s 'add | sort_by(.[0]) | unique_by(.[0])' "$PAGES_DIR"/page_*.json > "$MERGED_SORTED" 2>/dev/null || true
else
# jq is recommended; without it the pages are only concatenated (minimal fallback, not a single valid JSON array)
cat "$PAGES_DIR"/page_*.json > "$OUTDIR/klines_merged_raw.json" 2>/dev/null || true
cp "$OUTDIR/klines_merged_raw.json" "$MERGED_SORTED" 2>/dev/null || true
fi
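# Optional continuity check (a sketch; only meaningful when jq produced the merged file):
# count adjacent open-time pairs whose difference is not exactly INTV_MS.
if [[ -n "$JQ" && -s "$MERGED_SORTED" ]]; then
GAPS=$(jq --argjson step "$INTV_MS" \
'[.[][0]] | [., .[1:]] | transpose | map(select(.[1] != null and .[1] - .[0] != $step)) | length' \
"$MERGED_SORTED" 2>/dev/null || echo "?")
echo "Continuity check: ${GAPS:-?} irregular interval(s) in merged klines" | tee -a "$LOG"
fi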
# ---- Compute hashes ----
HASH_DIR="$OUTDIR/hashes"; mkdir -p "$HASH_DIR"
for f in "$PAGES_DIR"/page_*.json; do
[[ -f "$f" ]] || continue
$SHA256_CMD "$f" > "$HASH_DIR/$(basename "$f").sha256"
done
for h in "$PAGES_DIR"/page_*.headers; do
[[ -f "$h" ]] || continue
$SHA256_CMD "$h" > "$HASH_DIR/$(basename "$h").sha256"
done
if [[ -s "$MERGED_SORTED" ]]; then
$SHA256_CMD "$MERGED_SORTED" > "$HASH_DIR/klines_sorted.json.sha256"
fi
for p in "$PAGES_DIR"/page_*.json; do
[[ -f "$p" ]] || continue
bname=$(basename "$p" .json)
combo="$OUTDIR/${bname}.headers_plus_body.bin"
cat "$PAGES_DIR/${bname}.headers" "$p" > "$combo"
$SHA256_CMD "$combo" > "$HASH_DIR/${bname}.headers_plus_body.sha256"
done
if [[ -s "$CERT_PEM" ]]; then
$SHA256_CMD "$CERT_PEM" > "$HASH_DIR/tls_cert.pem.sha256"
fi
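# To re-verify any artifact later (sketch; run from the directory the script was invoked
# from so the relative paths recorded in the .sha256 files resolve):
#   cat "$HASH_DIR"/*.sha256 | $SHA256_CMD -c -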
# ---- Metadata ----
META="$OUTDIR/metadata.json"
{
echo "{"
echo " \"symbol\": \"${SYMBOL}\","
echo " \"interval\": \"${INTERVAL}\","
echo " \"start_ms\": ${START_MS:-null},"
echo " \"end_ms\": ${END_MS},"
echo " \"timeZone\": \"${TIMEZONE}\","
echo " \"limit\": ${LIMIT},"
echo " \"pages\": $((PAGE-1)),"
echo " \"total_rows\": ${TOTAL_ROWS},"
echo " \"api_host\": \"${API_HOST}\","
echo " \"generated_utc\": \"$(date -u +"%Y-%m-%dT%H:%M:%SZ")\""
echo "}"
} > "$META"
[[ -n "$JQ" ]] && jq . "$META" > "$META.tmp" && mv "$META.tmp" "$META"
# ---- OpenTimestamps (optional) ----
if $OTS_ENABLE && command -v ots >/dev/null 2>&1 && [[ -s "$MERGED_SORTED" ]]; then
echo "OTS stamping merged file..." | tee -a "$LOG"
ots stamp "$MERGED_SORTED" || true
ots info "${MERGED_SORTED}.ots" > "$OUTDIR/ots_proof.txt" 2>&1 || true
else
echo "Skip OTS (disabled or not installed or no merged file)." | tee -a "$LOG"
fi
# ---- git snapshot (repo initialized in-place in OUTDIR to avoid recursive self-copy) ----
if command -v git >/dev/null 2>&1; then
(
cd "$OUTDIR"
if [ ! -d ".git" ]; then
git init >/dev/null 2>&1 || true
fi
git add -A >/dev/null 2>&1 || true
git commit -m "binance evidence $(date -u +"%Y-%m-%dT%H:%M:%SZ")" >/dev/null 2>&1 || true
)
echo "Created git snapshot in $OUTDIR/.git" | tee -a "$LOG"
else
echo "git not found; skipped local git snapshot" | tee -a "$LOG"
fi
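# The snapshot can be inspected later (sketch), e.g.:
#   git -C "$OUTDIR" log --stat    # commit time and file list
#   git -C "$OUTDIR" diff HEAD     # empty output = tracked evidence files unchanged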
# ---- Package (exclude .git and the tarball itself) ----
# Wait for the optional background tcpdump (it self-terminates after ~20s) before packaging
if [[ -n "${TCPDUMP_PID:-}" ]]; then wait "$TCPDUMP_PID" 2>/dev/null || true; fi
TAR="$OUTDIR/evidence_$(date -u +"%Y%m%dT%H%M%SZ")_${SYMBOL}_${INTERVAL}.tar.gz"
tar --exclude='.git' --exclude="$(basename "$TAR")" -czf "$TAR" -C "$OUTDIR" . || true
echo "---------------------------------------------" | tee -a "$LOG"
echo "Evidence directory: $OUTDIR" | tee -a "$LOG"
echo "Merged klines: $MERGED_SORTED" | tee -a "$LOG"
echo "Hashes dir: $HASH_DIR" | tee -a "$LOG"
echo "Tarball: $TAR" | tee -a "$LOG"
echo "Quick verify:" | tee -a "$LOG"
echo " $SHA256_CMD \"$MERGED_SORTED\" # compare with $HASH_DIR/klines_sorted.json.sha256" | tee -a "$LOG"
echo " grep -i '^Date:' $OUTDIR/pages/page_1.headers # server time header" | tee -a "$LOG"
if command -v openssl >/dev/null 2>&1; then
echo " openssl x509 -in \"$OUTDIR/tls_cert.pem\" -noout -fingerprint -sha256 # match $OUTDIR/tls_cert_fingerprint.txt" | tee -a "$LOG"
else
echo " (install openssl to verify TLS fingerprint)" | tee -a "$LOG"
fi
echo "Done." | tee -a "$LOG"
# Post-run (once the calendars have aggregated the attestation), upgrade and verify the OTS proof:
ots upgrade ./evidence_20251014T082630Z_ATOMUSDT_1h/klines_sorted.json.ots
ots info ./evidence_.../klines_sorted.json.ots > ./evidence_.../ots_proof.txt
ots verify ./evidence_.../klines_sorted.json.ots >> ./evidence_.../ots_proof.txt 2>&1 || true