Viacom fix for USTV VoD
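Three patched files for the USTV VoD XBMC add-on. Going by the imports and the RunScript call in the playback code, they are the connection helper _connection.py, the Viacom playback module, and the local proxy _proxy.py; the per-file names from the original gist page are not preserved in this copy. The helper builds urllib2 openers for direct, DNS-proxied, and US-proxy connections; the playback module resolves a Viacom media feed to an HLS playlist and rewrites it so every segment is fetched through the local proxy; the proxy script serves those segments and their decryption keys to the player.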
#!/usr/bin/python
# -*- coding: utf-8 -*-
import _addoncompat
import cookielib
import os
import simplejson
import urllib
import urllib2
import socks
import socket
import time
import xbmc
from dns.resolver import Resolver
from httplib import HTTPConnection

PLUGINPATH = xbmc.translatePath(_addoncompat.get_path())
RESOURCESPATH = os.path.join(PLUGINPATH, 'resources')
CACHEPATH = os.path.join(RESOURCESPATH, 'cache')
COOKIE = os.path.join(CACHEPATH, 'cookie.txt')
DNS_REFRESH_DELAY = 10
IPURL = 'http://icanhazip.com'
IPFILE = os.path.join(CACHEPATH, 'ip.txt')
class MyHTTPConnection(HTTPConnection):
    _dnsproxy = []

    def connect(self):
        # Resolve the host through the configured DNS proxy instead of the
        # system resolver, then connect directly to the returned address.
        resolver = Resolver()
        resolver.nameservers = self._dnsproxy
        answer = resolver.query(self.host, 'A')
        self.host = answer.rrset.items[0].address
        self.sock = socket.create_connection((self.host, self.port))


class MyHTTPHandler(urllib2.HTTPHandler):
    _dnsproxy = []

    def http_open(self, req):
        MyHTTPConnection._dnsproxy = self._dnsproxy
        return self.do_open(MyHTTPConnection, req)


class SocksiPyConnection(HTTPConnection):
    def __init__(self, proxytype, proxyaddr, proxyport = None, rdns = True, username = None, password = None, *args, **kwargs):
        self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password)
        HTTPConnection.__init__(self, *args, **kwargs)

    def connect(self):
        # Open the connection through a SOCKS proxy (socksipy).
        self.sock = socks.socksocket()
        self.sock.setproxy(*self.proxyargs)
        if isinstance(self.timeout, float):
            self.sock.settimeout(self.timeout)
        self.sock.connect((self.host, self.port))


class SocksiPyHandler(urllib2.HTTPHandler):
    def __init__(self, *args, **kwargs):
        self.args = args
        self.kw = kwargs
        urllib2.HTTPHandler.__init__(self)

    def http_open(self, req):
        def build(host, port = None, strict = None, timeout = 0):
            conn = SocksiPyConnection(*self.args, host = host, port = port, strict = strict, timeout = timeout, **self.kw)
            return conn
        return self.do_open(build, req)
def prepare_dns_proxy(cj):
    # Optionally ping a dynamic-DNS update URL when the external IP changes,
    # then build an opener that resolves hosts through the DNS proxy.
    update_url = _addoncompat.get_setting('dns_update_url')
    if update_url:
        try:
            t = os.path.getmtime(IPFILE)
            now = time.time()
            elapsed = now - t
        except:
            elapsed = -1
        try:
            file = open(IPFILE, 'r')
            oldip = file.read()
            file.close()
        except:
            oldip = ''
        if elapsed > DNS_REFRESH_DELAY or elapsed == -1:
            myip = getURL(IPURL, connectiontype = 0)
            if myip != oldip:
                oldip = myip
                getURL(update_url, connectiontype = 0)
            file = open(IPFILE, 'w')
            file.write(oldip)
            file.close()
    dnsproxy = []
    dnsproxy.append(_addoncompat.get_setting('dns_proxy'))
    dnsproxy.append(_addoncompat.get_setting('dns_proxy_2'))
    MyHTTPHandler._dnsproxy = dnsproxy
    opener = urllib2.build_opener(MyHTTPHandler, urllib2.HTTPCookieProcessor(cj))
    return opener
def prepare_us_proxy(cj):
    if _addoncompat.get_setting('us_proxy_socks5') == 'true':
        if _addoncompat.get_setting('us_proxy_pass') != '' and _addoncompat.get_setting('us_proxy_user') != '':
            print 'Using socks5 authenticated proxy: ' + _addoncompat.get_setting('us_proxy') + ':' + _addoncompat.get_setting('us_proxy_port')
            opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, _addoncompat.get_setting('us_proxy'), int(_addoncompat.get_setting('us_proxy_port')), True, _addoncompat.get_setting('us_proxy_user'), _addoncompat.get_setting('us_proxy_pass')))
        else:
            print 'Using socks5 proxy: ' + _addoncompat.get_setting('us_proxy') + ':' + _addoncompat.get_setting('us_proxy_port')
            opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, _addoncompat.get_setting('us_proxy'), int(_addoncompat.get_setting('us_proxy_port'))))
    elif _addoncompat.get_setting('us_proxy_socks5') == 'false':
        us_proxy = 'http://' + _addoncompat.get_setting('us_proxy') + ':' + _addoncompat.get_setting('us_proxy_port')
        proxy_handler = urllib2.ProxyHandler({'http' : us_proxy})
        if _addoncompat.get_setting('us_proxy_pass') != '' and _addoncompat.get_setting('us_proxy_user') != '':
            print 'Using authenticated proxy: ' + us_proxy
            password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, us_proxy, _addoncompat.get_setting('us_proxy_user'), _addoncompat.get_setting('us_proxy_pass'))
            proxy_auth_handler = urllib2.ProxyBasicAuthHandler(password_mgr)
            opener = urllib2.build_opener(proxy_handler, proxy_auth_handler, urllib2.HTTPCookieProcessor(cj))
        else:
            print 'Using proxy: ' + us_proxy
            opener = urllib2.build_opener(proxy_handler, urllib2.HTTPCookieProcessor(cj))
    return opener
def getURL(url, values = None, header = None, amf = False, savecookie = False, loadcookie = False, connectiontype = _addoncompat.get_setting('connectiontype'), returncodeonerror = False):
    if header is None:
        header = {}
    try:
        old_opener = urllib2._opener
        cj = cookielib.LWPCookieJar(COOKIE)
        if int(connectiontype) == 0:
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            urllib2.install_opener(opener)
        elif int(connectiontype) == 1:
            urllib2.install_opener(prepare_dns_proxy(cj))
        elif int(connectiontype) == 2:
            urllib2.install_opener(prepare_us_proxy(cj))
        print '_connection :: getURL :: url = ' + url
        if values is None:
            req = urllib2.Request(bytes(url))
        else:
            if amf is False:
                data = urllib.urlencode(values)
            else:
                data = values
            req = urllib2.Request(bytes(url), data)
        header.update({'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0'})
        for key, value in header.iteritems():
            req.add_header(key, value)
        if loadcookie is True:
            try:
                cj.load(ignore_discard = True)
                cj.add_cookie_header(req)
            except:
                print 'Cookie Loading Error'
                pass
        response = urllib2.urlopen(req)
        link = response.read()
        if savecookie is True:
            try:
                cj.save(ignore_discard = True)
            except:
                print 'Cookie Saving Error'
                pass
        response.close()
        urllib2.install_opener(old_opener)
    except urllib2.HTTPError, error:
        print 'HTTP Error reason: ', error
        if returncodeonerror:
            return error.code
        else:
            return error.read()
    else:
        return link
def getRedirect(url, values = None, header = None, connectiontype = _addoncompat.get_setting('connectiontype'), returncodeonerror = False):
    if header is None:
        header = {}
    try:
        old_opener = urllib2._opener
        cj = cookielib.LWPCookieJar(COOKIE)
        if int(connectiontype) == 1:
            urllib2.install_opener(prepare_dns_proxy(cj))
        elif int(connectiontype) == 2:
            urllib2.install_opener(prepare_us_proxy(cj))
        print '_connection :: getRedirect :: url = ' + url
        if values is None:
            req = urllib2.Request(bytes(url))
        else:
            data = urllib.urlencode(values)
            req = urllib2.Request(bytes(url), data)
        header.update({'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0'})
        if int(connectiontype) == 2:
            header.update({'X-Forwarded-For' : _addoncompat.get_setting('us_proxy')})
        for key, value in header.iteritems():
            req.add_header(key, value)
        response = urllib2.urlopen(req)
        finalurl = response.geturl()
        response.close()
        urllib2.install_opener(old_opener)
    except urllib2.HTTPError, error:
        print 'HTTP Error reason: ', error
        if returncodeonerror:
            return error.code
        else:
            return error.read()
    else:
        return finalurl
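A minimal usage sketch for the connection helper, assuming it is saved as _connection.py (the name the playback module imports); the URLs and header values here are placeholders, not part of the gist:

import _connection

# Fetch a page, persisting cookies for a follow-up request.
html = _connection.getURL('http://example.com/feed', savecookie = True)
page = _connection.getURL('http://example.com/page', loadcookie = True)

# Resolve the final URL after redirects, sending a Referer header.
final = _connection.getRedirect('http://example.com/watch', header = {'Referer' : 'http://example.com/'})

The connection type (direct, DNS proxy, or US proxy) comes from the add-on settings unless connectiontype is passed explicitly, as the /stop request in the playback module does with connectiontype = 0. The second file, the Viacom playback module, follows.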
#!/usr/bin/python
# -*- coding: utf-8 -*-
import _addoncompat
import _common
import _connection
import _m3u8
import os
import base64
import re
import sys
import urllib
import time
import xbmc
import xbmcgui
import xbmcplugin
from bs4 import BeautifulSoup, SoupStrainer

pluginHandle = int(sys.argv[1])
VIDEOURL = 'http://media.mtvnservices.com/'
DEVICE = 'Xbox'
def play_video(BASE, video_url = _common.args.url, media_base = VIDEOURL):
    if media_base not in video_url:
        video_url = media_base + video_url
    try:
        qbitrate = _common.args.quality
    except:
        qbitrate = None
    closedcaption = []
    # Can we make this more generic
    if 'southparkstudios' in video_url:
        sp_id = video_url.split(':')
        sp_id = sp_id[len(sp_id) - 1]
        feed_url = 'http://www.southparkstudios.com/feeds/video-player/mrss/mgid%3Aarc%3Aepisode%3Asouthparkstudios.com%3A' + sp_id
    elif 'feed' not in video_url:
        swf_url = _connection.getRedirect(video_url, header = {'Referer' : BASE})
        params = dict(item.split('=') for item in swf_url.split('?')[1].split('&'))
        uri = urllib.unquote_plus(params['uri'])
        config_url = urllib.unquote_plus(params['CONFIG_URL'].replace('Other', DEVICE))
        config_data = _connection.getURL(config_url, header = {'Referer' : video_url, 'X-Forwarded-For' : '12.13.14.15'})
        feed_url = BeautifulSoup(config_data, 'html.parser', parse_only = SoupStrainer('feed')).feed.string
        feed_url = feed_url.replace('{uri}', uri).replace('&amp;', '&').replace('{device}', DEVICE).replace('{ref}', 'None').strip()
    else:
        feed_url = video_url
    feed_data = _connection.getURL(feed_url)
    video_tree = BeautifulSoup(feed_data, 'html.parser', parse_only = SoupStrainer('media:group'))
    video_segments = video_tree.find_all('media:content')
    for video_segment in video_segments[:1]:  # only the first segment; the other acts are derived below
        video_url3 = video_segment['url'].replace('{device}', DEVICE)
        video_data3 = _connection.getURL(video_url3, header = {'X-Forwarded-For' : '12.13.14.15'})
        video_tree3 = BeautifulSoup(video_data3, 'html5lib')
        try:
            closedcaption.append(video_tree3.find('typographic', format = 'ttml'))
        except:
            pass
        video_menu = video_tree3.find('src').string
    m3u_master_data = _connection.getURL(video_menu, savecookie = True)
    m3u_master = _m3u8.parse(m3u_master_data)
    hbitrate = -1
    sbitrate = int(_addoncompat.get_setting('quality')) * 1024
    for video_index in m3u_master.get('playlists'):
        bitrate = int(video_index.get('stream_info')['bandwidth'])
        if qbitrate is None:
            # Pick the highest bitrate that stays within the quality setting.
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                m3u8_url = video_index.get('uri')
        elif bitrate == int(qbitrate):
            m3u8_url = video_index.get('uri')
    m3u8_urls = []  # list for all acts
    if re.search('(act[0-9])', m3u8_url):
        i = 1
        while i < 10:
            m3u8_url_act = re.sub('(act[0-9])', 'act' + str(i), m3u8_url)
            m3u8_urls.append(m3u8_url_act)
            i = i + 1
    else:
        m3u8_urls.append(m3u8_url)
    act = 0
    m3u_data_full = ''
    for m3u8_url in m3u8_urls:
        act = act + 1
        m3u_data = _connection.getURL(m3u8_url, loadcookie = True, returncodeonerror = True)
        if m3u_data == 403 or m3u_data == 404:
            # This act does not exist; skip it.
            continue
        key_url = re.compile('URI="(.*?)"').findall(m3u_data)[0]
        key_data = _connection.getURL(key_url, loadcookie = True)
        key_file = open(_common.KEYFILE + str(act), 'wb')
        key_file.write(key_data)
        key_file.close()
        # Route every segment URL through the local proxy, base64-encoded.
        video_url5 = re.compile('(http:.*?)\n').findall(m3u_data)
        for i, video_item in enumerate(video_url5):
            newurl = base64.b64encode(video_item)
            newurl = urllib.quote_plus(newurl)
            m3u_data = m3u_data.replace(video_item, 'http://127.0.0.1:12345/foxstation/' + newurl)
        m3u_data = m3u_data.replace(key_url, 'http://127.0.0.1:12345/play.key' + str(act))
        m3u_data = re.sub('#EXT-X-ENDLIST\n', '', m3u_data)
        if act > 1:
            # Strip the playlist headers so the acts concatenate cleanly.
            m3u_data = re.sub('#EXTM3U\n', '', m3u_data)
            m3u_data = re.sub('#EXT-X-TARGETDURATION:[0-9]*\n', '', m3u_data)
            m3u_data = re.sub('#EXT-X-ALLOW-CACHE:[A-Z]*\n', '', m3u_data)
            m3u_data = re.sub('#EXT-X-PLAYLIST-TYPE:[A-Z]*\n', '', m3u_data)
            m3u_data = re.sub('#EXT-X-VERSION:[0-9]\n', '', m3u_data)
            m3u_data = re.sub('#EXT-X-MEDIA-SEQUENCE:[0-9]\n', '', m3u_data)
        m3u_data_full = m3u_data_full + m3u_data
    m3u_data_full = m3u_data_full + '#EXT-X-ENDLIST'
    playfile = open(_common.PLAYFILE, 'w')
    playfile.write(m3u_data_full)
    playfile.close()
    filestring = 'XBMC.RunScript(' + os.path.join(_common.LIBPATH, '_proxy.py') + ', 12345)'
    xbmc.executebuiltin(filestring)
    finalurl = _common.PLAYFILE
    localhttpserver = True
    time.sleep(20)  # give the local proxy time to start
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None):
        convert_subtitles(closedcaption)
    item = xbmcgui.ListItem(path = finalurl)
    if qbitrate is not None:
        item.setThumbnailImage(_common.args.thumb)
        item.setInfo('Video', { 'title' : _common.args.name,
                                'season' : _common.args.season_number,
                                'episode' : _common.args.episode_number,
                                'TVShowTitle' : _common.args.show_title})
    xbmcplugin.setResolvedUrl(pluginHandle, True, item)
    if ((_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None)) or localhttpserver is True:
        while not xbmc.Player().isPlaying():
            xbmc.sleep(100)
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None):
        xbmc.Player().setSubtitles(_common.SUBTITLE)
    if localhttpserver is True:
        while xbmc.Player().isPlaying():
            xbmc.sleep(10)
        _connection.getURL('http://localhost:12345/stop', connectiontype = 0)
def list_qualities(BASE, video_url = _common.args.url, media_base = VIDEOURL):
    if media_base not in video_url:
        video_url = media_base + video_url
    bitrates = []
    if 'feed' not in video_url:
        swf_url = _connection.getRedirect(video_url, header = {'Referer' : BASE})
        params = dict(item.split('=') for item in swf_url.split('?')[1].split('&'))
        uri = urllib.unquote_plus(params['uri'])
        config_url = urllib.unquote_plus(params['CONFIG_URL'])
        config_data = _connection.getURL(config_url, header = {'Referer' : video_url, 'X-Forwarded-For' : '12.13.14.15'})
        feed_url = BeautifulSoup(config_data, 'html.parser', parse_only = SoupStrainer('feed')).feed.string
        feed_url = feed_url.replace('{uri}', uri).replace('&amp;', '&').replace('{device}', DEVICE).replace('{ref}', 'None').strip()
    else:
        feed_url = video_url
    feed_data = _connection.getURL(feed_url)
    video_tree = BeautifulSoup(feed_data, 'html.parser', parse_only = SoupStrainer('media:group'))
    video_segments = video_tree.find_all('media:content')
    srates = []
    for video_segment in video_segments:
        video_url3 = video_segment['url'].replace('{device}', DEVICE)
        video_data3 = _connection.getURL(video_url3, header = {'X-Forwarded-For' : '12.13.14.15'})
        video_menu = BeautifulSoup(video_data3, 'html5lib').findAll('rendition')
        orates = srates
        srates = []
        for video_index in video_menu:
            bitrate = int(video_index['bitrate'])
            srates.append((bitrate, bitrate))
        if orates != []:
            # Keep only the bitrates available in every segment.
            srates = list(set(srates).intersection(orates))
    bitrates = srates
    return bitrates
def clean_subs(data):
    br = re.compile(r'<br.*?>')
    tag = re.compile(r'<.*?>')
    space = re.compile(r'\s\s\s+')
    apos = re.compile(r'&apos;')
    gt = re.compile(r'&gt;+')
    sub = br.sub('\n', data)
    sub = tag.sub(' ', sub)
    sub = space.sub(' ', sub)
    sub = apos.sub('\'', sub)
    sub = gt.sub('>', sub)
    return sub
def convert_subtitles(closedcaption, durations = []):
    str_output = ''
    j = 0
    count = 0
    for closedcaption_url in closedcaption:
        count = count + 1
        if closedcaption_url is not None:
            subtitle_data = _connection.getURL(closedcaption_url['src'], connectiontype = 0)
            subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
            lines = subtitle_data.find_all('p')
            for i, line in enumerate(lines):
                if line is not None:
                    sub = clean_subs(_common.smart_utf8(line))
                    try:
                        # Convert TTML timestamps to SRT (comma-separated ms).
                        start_time = _common.smart_utf8(line['begin'][:-1].replace('.', ','))
                        end_time = _common.smart_utf8(line['end'][:-1].replace('.', ','))
                        str_output += str(j + i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
                    except:
                        pass
            j = j + i + 1
            file = open(os.path.join(_common.CACHEPATH, 'subtitle-%s.srt' % int(count)), 'w')
            file.write(str_output)
            str_output = ''
            file.close()
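To illustrate the act handling in play_video: a hedged sketch, with a made-up URL, of how one act URL is expanded into candidate URLs for up to nine acts; the 403/404 check above then skips the acts that do not exist:

import re

m3u8_url = 'http://example.com/ep1_act1_1200.m3u8'  # hypothetical playlist URL
m3u8_urls = [re.sub('(act[0-9])', 'act' + str(i), m3u8_url) for i in range(1, 10)]
# ['http://example.com/ep1_act1_1200.m3u8', 'http://example.com/ep1_act2_1200.m3u8', ...]

The third file, the local proxy script that play_video launches on port 12345, follows.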
#!/usr/bin/python
# -*- coding: utf-8 -*-
import _addoncompat
import cookielib
import BaseHTTPServer
import os
import sys
import xbmc
import re
import base64
import socket
import traceback
import time
import urllib
import urllib2

PLUGINPATH = xbmc.translatePath(_addoncompat.get_path())
RESOURCESPATH = os.path.join(PLUGINPATH, 'resources')
CACHEPATH = os.path.join(RESOURCESPATH, 'cache')
VIDEOPATH = os.path.join(CACHEPATH, 'videos')
KEYFILE = os.path.join(CACHEPATH, 'play.key')
COOKIE = os.path.join(CACHEPATH, 'cookie.txt')
HOST_NAME = 'localhost'
PORT_NUMBER = int(sys.argv[1])
class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
    def serve_forever(self):
        self.stop = False
        while not self.stop:
            self.handle_request()


class StoppableHttpRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def _writeheaders(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

    def do_HEAD(self):
        self._writeheaders()

    def do_GET(self):
        print 'XBMCLocalProxy: Serving GET request...'
        self.answer_request(1)

    def answer_request(self, sendData):
        request_path = self.path[1:]
        request_path = re.sub(r'\?.*', '', request_path)
        if 'stop' in self.path:
            self._writeheaders()
            self.server.stop = True
            print 'Server stopped'
        elif 'play.key' in self.path:
            # Serve the decryption key that play_video saved for this act.
            try:
                self._writeheaders()
                file = open(KEYFILE.replace('play.key', request_path), 'rb')
                data = file.read()
                self.wfile.write(data)
                file.close()
            except IOError:
                self.send_error(404, 'File Not Found: %s' % self.path)
                return
        elif 'foxstation' in self.path:
            # Decode the base64-encoded upstream URL and stream it through.
            realpath = urllib.unquote_plus(request_path[11:])
            fURL = base64.b64decode(realpath)
            self.serveFile(fURL, sendData)
    def serveFile(self, fURL, sendData):
        cj = cookielib.LWPCookieJar(COOKIE)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        request = urllib2.Request(url = fURL)
        opener.addheaders = []
        d = {}
        # Forward the client's headers upstream, dropping Host and spoofing
        # a desktop Firefox user agent.
        sheaders = self.decodeHeaderString(''.join(self.headers.headers))
        for key in sheaders:
            d[key] = sheaders[key]
            if key == 'User-Agent':
                opener.addheaders.append(('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0'))
            elif key != 'Host':
                opener.addheaders.append((key, sheaders[key]))
        if os.path.isfile(COOKIE):
            cj.load(ignore_discard = True)
            cj.add_cookie_header(request)
        response = opener.open(request)
        self.send_response(200)
        print 'XBMCLocalProxy: Sending headers...'
        headers = response.info()
        for key in headers.keys():
            try:
                val = headers[key]
                self.send_header(key, val)
            except Exception, e:
                print e
                pass
        self.end_headers()
        if sendData:
            print 'XBMCLocalProxy: Sending data...'
            fileout = self.wfile
            try:
                buf = 'INIT'
                try:
                    # Stream the response through in 8 KB chunks.
                    while (buf != None) and (len(buf) > 0):
                        buf = response.read(8 * 1024)
                        fileout.write(buf)
                        fileout.flush()
                    response.close()
                    fileout.close()
                    print time.asctime(), 'Closing connection'
                except socket.error, e:
                    print time.asctime(), 'Client closed the connection.'
                    try:
                        response.close()
                        fileout.close()
                    except Exception, e:
                        return
                except Exception, e:
                    traceback.print_exc(file = sys.stdout)
                    response.close()
                    fileout.close()
            except:
                traceback.print_exc()
                fileout.close()
                return
            try:
                fileout.close()
            except:
                pass

    def decodeHeaderString(self, hs):
        # Parse raw 'Key: value' header lines into a dict.
        di = {}
        hss = hs.replace('\r', '').split('\n')
        for line in hss:
            u = line.split(': ')
            try:
                di[u[0]] = u[1]
            except:
                pass
        return di
def runserver(server_class = StoppableHTTPServer, handler_class = StoppableHttpRequestHandler):
    server_address = (HOST_NAME, PORT_NUMBER)
    httpd = server_class(server_address, handler_class)
    httpd.serve_forever()


if __name__ == '__main__':
    runserver()
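For reference, a sketch of the URL scheme the proxy and the playback module agree on; the segment URL is a placeholder. play_video base64-encodes each segment URL behind the /foxstation/ prefix, and answer_request/serveFile reverse the encoding before streaming the segment through:

import base64
import urllib

segment_url = 'http://example.com/seg0.ts'  # hypothetical segment URL
proxied = 'http://127.0.0.1:12345/foxstation/' + urllib.quote_plus(base64.b64encode(segment_url))

# The proxy recovers the upstream URL from the request path:
original = base64.b64decode(urllib.unquote_plus(proxied.split('/foxstation/')[1]))
assert original == segment_url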