import time
import urllib
import urllib2
import hashlib
import struct
import socket
import os
import re
import json
import cStringIO as StringIO

import bencode
import gevent
import gevent.monkey
import maxminddb
from gevent.pool import Pool
from subtl.subtl import UdpTrackerClient

gevent.monkey.patch_all()
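# Third-party requirements (names taken from the imports above): gevent, maxminddb
# and bencode are assumed to be pip-installable; subtl (the small UDP tracker client)
# is assumed to sit next to this script as a local package, e.g. copied from
# ZeroNet's bundled lib directory.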
class SiteTest:
    def __init__(self):
        self.address = "Hello"  # Dummy site address; its SHA1 is announced as the info_hash
        self.ips = {}  # Announce URLs grouped by resolved protocol://ip:port, to spot duplicates
        self.pool = Pool(20)  # Concurrency

        # Collect http/udp announce URLs from trackers.txt
        jobs = []
        for line in open("trackers.txt"):
            line = line.strip()
            if "://" not in line:
                continue
            protocol, address = line.split("://")
            if protocol in ["http", "udp"]:
                jobs.append([protocol, address])
        print "Trackers found in trackers.txt: %s" % len(jobs)

        # De-duplicate while keeping the original order
        unique = []
        for item in jobs:
            if item not in unique:
                unique.append(item)
        print "Unique: %s" % len(unique)
        jobs = unique

        # Announce to every tracker in parallel
        threads = []
        for job in jobs:
            thread = self.pool.spawn(self.announceTracker, *job)
            thread.job = job
            threads.append(thread)
        gevent.joinall(threads)
        print "Done."

        # Write the results, sorted by response time
        # (error values sort after the float response times in Python 2)
        report = open("report.txt", "w")
        good_trackers = open("good_trackers.txt", "w")
        bad_trackers = open("bad_trackers.txt", "w")
        geodb = maxminddb.open_database("GeoLite2-City.mmdb")
        for thread in sorted(threads, key=lambda thread: thread.value[1]):
            if not thread.value[0]:  # Bad tracker
                bad_trackers.write("%s %s\n" % (thread.job, thread.value))
                continue
            ip = socket.gethostbyname(re.sub("(.*?)[/:].*", "\\1", thread.job[1]))
            try:
                country = geodb.get(ip)["country"]["iso_code"]
            except Exception:
                country = "n/a"
            report.write(
                "%2s %.3fs %s\n" % (country, thread.value[1], "://".join(thread.job))
            )
            good_trackers.write("://".join(thread.job) + "\n")
        json.dump(self.ips, open("duplicates.txt", "w"), indent=4)
    def announceTracker(self, tracker_protocol, tracker_address, fileserver_port=0, add_types=[], my_peer_id="", mode="start"):
        # Returns (True, elapsed_seconds, peers) on success, (False, error) on failure
        # Duplicates by IP: skip trackers whose protocol://ip:port was already seen
        try:
            ip_port = re.sub("(.*?)[/].*", "\\1", tracker_address)
            if ":" in ip_port:
                ip, port = ip_port.split(":")
            else:
                ip = ip_port
                port = "80"
        except Exception:
            print "-",
            return False, "No IP found"
        key = tracker_protocol + "://" + ip + ":" + port
        if key in self.ips:
            self.ips[key].append(tracker_protocol + "://" + tracker_address)
            print "D",
            return False, "Duplicate"
        self.ips[key] = [tracker_protocol + "://" + tracker_address]
        print ".",
        s = time.time()
        if not my_peer_id:
            my_peer_id = "ZERONET-NM8eBGDivxWQ"

        if tracker_protocol == "udp":  # Udp tracker
            try:
                if "/" in tracker_address:
                    tracker_address = tracker_address.split("/", 1)[0]  # Drop the path part
                ip, port = tracker_address.split(":")
                back = None  # Stays None if the timeout fires before the announce completes
                with gevent.Timeout(10, False):  # Give up silently after 10 seconds
                    tracker = UdpTrackerClient(ip, int(port))
                    tracker.peer_port = fileserver_port
                    tracker.connect()
                    tracker.poll_once()
                    tracker.announce(info_hash=hashlib.sha1(self.address).hexdigest(), num_want=50)
                    back = tracker.poll_once()
                if not back:
                    return False, "No response"
                peers = back["response"]["peers"]
            except Exception, err:
                print "-",
                return False, err
        elif tracker_protocol == "http":  # Http tracker
            params = {
                'info_hash': hashlib.sha1(self.address).digest(),
                'peer_id': my_peer_id, 'port': fileserver_port,
                'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30,
                'event': 'started'
            }
            req = None
            response = ""
            try:
                url = "http://" + tracker_address + "?" + urllib.urlencode(params)
                # Load url
                with gevent.Timeout(10, False):
                    req = urllib2.urlopen(url, timeout=10)
                    response = req.read()
                    req.fp._sock.recv = None  # Hacky avoidance of memory leak for older python versions
                    req.close()
                    req = None
                if not response:
                    print "-",
                    return False, "No response"
                # Decode compact peer list: 6 bytes per peer (4-byte IPv4 + 2-byte port, big-endian)
                peer_data = bencode.decode(response)["peers"]
                response = None
                peer_count = len(peer_data) / 6
                peers = []
                for peer_offset in xrange(peer_count):
                    off = 6 * peer_offset
                    peer = peer_data[off:off + 6]
                    addr, port = struct.unpack('!LH', peer)
                    peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
            except Exception, err:
                print "-",
                return False, "%s, response: %s" % (err, response)
        else:
            peers = []

        print "+",
        return True, time.time() - s, peers
# Download the GeoLite2 City database on first run (used for the country column in report.txt)
if not os.path.isfile("GeoLite2-City.mmdb"):
    db_path = "GeoLite2-City.mmdb"
    import gzip
    import shutil
    print "Downloading GeoLite2 City database..."
    # Download
    remote_file = urllib.urlopen("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz")
    data = StringIO.StringIO()
    while True:
        buff = remote_file.read(1024 * 16)
        if not buff:
            break
        data.write(buff)
    print "GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell()
    data.seek(0)
    # Unpack
    with gzip.GzipFile(fileobj=data) as gzip_file:
        with open(db_path, "wb") as db_file:
            shutil.copyfileobj(gzip_file, db_file)

SiteTest()
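
Usage note (a sketch, based on the code above): the script expects a trackers.txt file next to it with one announce URL per line; only http:// and udp:// entries are tested, e.g. (hostnames illustrative):

    udp://tracker.example.org:6969/announce
    http://tracker.example.net:8080/announce

Results are written to report.txt (country code, response time and URL, fastest first), good_trackers.txt, bad_trackers.txt, and duplicates.txt (announce URLs that resolve to an already-seen IP:port).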