Fast and small multithreaded HTTP scanner and HTML title grabber
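Requires the third-party packages requests and dnspython (pip install requests dnspython); urllib3 is pulled in as a dependency of requests.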
#!/usr/bin/env python3
'''
python3 -u titleget --ports 8080 80 443 8443 5601 3000 1900 9000 4444 9090 7777 5555 2332 8888 6002 6001 8000 80 1234 7001 50100 3128 20183 \
--threads 512 \
--input targets --output targets.json
'''
import requests
import urllib3
import argparse
import logging
import re, os, json
import multiprocessing.dummy as mp
import ipaddress
import random
import dns.resolver
from urllib3.exceptions import InsecureRequestWarning

# Every request below uses verify=False, so silence the certificate warnings
urllib3.disable_warnings(category=InsecureRequestWarning)
DEFAULT_UA = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0'
nameservers = [
    "1.0.0.1", "1.1.1.1",               # Cloudflare
    "8.8.4.4", "8.8.8.8",               # Google
    "208.67.222.222", "208.67.220.220", # OpenDNS
    "9.9.9.9", "149.112.112.112",       # Quad9
    "64.6.64.6", "64.6.65.6",           # Verisign
]
def reverse_lookup(ipstr):
    """Resolve the PTR record of an address through the public resolvers above."""
    ip = ipaddress.IPv4Address(str(ipstr))
    random.shuffle(nameservers)  # spread the query load across resolvers
    resolver = dns.resolver.Resolver(configure=False)
    resolver.nameservers = nameservers
    try:
        answer = resolver.resolve(ip.reverse_pointer, "PTR")
        return [str(record) for record in answer]
    except Exception:
        return []
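# CVE-2015-1635 (MS15-034) is the HTTP.sys range-header flaw in Windows/IIS.
# Requesting "Range: bytes=0-18446744073709551615" and getting "416 Requested
# Range Not Satisfiable" back is the usual non-destructive sign that a host
# is unpatched, which is exactly what check_cve() below looks for.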
def check_cve(url):
    try:
        response = requests.get(
            url,
            headers={"Range": "bytes=0-18446744073709551615", 'User-Agent': DEFAULT_UA},
            timeout=(3, 3),
            verify=False,
        )
        if response.status_code == 416:
            return True
    except Exception:
        return False
    return False
def get_title(data):
    """Extract the first <title> from an HTML body, truncated to 64 chars."""
    data = data.replace("\r\n", "").replace("\n", "").replace("\t", "")
    matches = re.findall(r"<title>(.*?)</title>", data)
    if len(matches) >= 1:
        out = matches[0].strip()
    elif len(data) < 64:
        out = data  # tiny bodies with no title are kept verbatim
    else:
        return None  # no title and a long body: avoid len(None) crashing below
    return (out[:64] + '..') if len(out) > 64 else out
def read_file(filename, ports, output):
    """Load CIDR networks from a file, splitting anything larger than a /24."""
    min_prefix = 24
    out = []
    if not os.path.isfile(filename):
        return out
    with open(filename, 'r') as infile:
        for line in infile:
            if ":" in line:
                print("Found : in input file, stopping")
                break
            if "#" in line:
                print("Found # in input file, skipping")
                continue
            line = line.replace("\n", "").replace(" ", "")
            if not line:
                continue
            network = ipaddress.IPv4Network(line)
            if network.prefixlen >= min_prefix:
                out.append({"network": network, "output": output, "port": ports})
            else:
                # Split big networks into /24s so each becomes one work unit
                for subnet in network.subnets(new_prefix=min_prefix):
                    out.append({"network": subnet, "output": output, "port": ports})
    random.shuffle(out)
    return out
def write_file(filename, results):
    with open(filename, 'a+') as outfile:
        for item in results:
            outfile.write(json.dumps(item) + "\n")
def check_subnet(obj):
    results = []
    for ip in obj["network"]:
        results += check_server(ip, obj["port"])
    write_file(obj["output"], results)
def check_server(addr, ports):
    results = []
    for port in ports:
        # Any port containing "443" (443, 8443, ...) is probed over TLS
        proto = "https" if "443" in port else "http"
        url = "{}://{}:{}".format(proto, addr, port)
        result = do_request(url)
        result["hostnames"] = reverse_lookup(addr)
        if result["ok"]:
            results.append(result)
    return results
def do_request(url):
    target = {"url": url}
    try:
        r = requests.get(url, timeout=(3, 3), verify=False, allow_redirects=True,
                         headers={'User-Agent': DEFAULT_UA})
        target["text"] = r.reason
        target["code"] = r.status_code
        target["ok"] = r.ok
        target["server"] = r.headers.get("server", "")
        target["CVE-2015-1635"] = False
        if "IIS" in target["server"]:
            target["CVE-2015-1635"] = check_cve(url)
        target["text"] = get_title(r.text)
        logging.info("{}\t{}".format(target["url"], target["text"]))
    except Exception as e:
        target["text"] = "{}".format(e)
        target["code"] = 0
        target["ok"] = False
    return target
def get_args():
    parser = argparse.ArgumentParser(description="Get basic information about a web server like the title and server header")
    parser.add_argument('--ports', required=True, help='Ports to check', nargs='+')
    parser.add_argument('--threads', required=False, help='Number of threads', type=int, default=384)
    parser.add_argument('--input', required=False, help='Input targets file', default="targets")
    parser.add_argument('--output', required=False, help='Output JSON lines file', default="targets.json")
    return parser.parse_args()
if __name__ == "__main__":
    args = get_args()
    FORMAT = '[%(asctime)s] %(levelname)s %(message)s'
    logging.basicConfig(level=logging.INFO, format=FORMAT)
    logging.info("Ports are {}, loading targets".format(args.ports))
    targets = read_file(args.input, args.ports, args.output)
    logging.info("Using {} threads to crunch {} networks".format(args.threads, len(targets)))
    p = mp.Pool(args.threads)
    p.map(check_subnet, targets)
    p.close()
    p.join()
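Each responsive host is appended to the output file as one JSON object per line. The keys match what do_request() and check_server() populate; the values in this sample record are purely illustrative:

{"url": "http://203.0.113.7:8080", "text": "Dashboard", "code": 200, "ok": true, "server": "nginx", "CVE-2015-1635": false, "hostnames": ["host7.example.net."]}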