This is the reference point. All the other options are based on this.
|-- app
| |-- controllers
| | |-- admin
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
gh_url = 'https://api.github.com'
req = urllib2.Request(gh_url)
# Basic-auth setup; 'user' and 'password' are placeholders for real credentials
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, gh_url, 'user', 'password')
auth_handler = urllib2.HTTPBasicAuthHandler(password_manager)
opener = urllib2.build_opener(auth_handler)
print opener.open(req).getcode()
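On Python 3, where urllib2 was split into urllib.request, the same basic-auth request looks roughly like the sketch below; the 'user' and 'password' values are placeholders, not real credentials.

#!/usr/bin/env python3
# Same request with Python 3's urllib.request (urllib2's replacement)
import urllib.request
gh_url = 'https://api.github.com'
password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, gh_url, 'user', 'password')  # placeholder credentials
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
opener = urllib.request.build_opener(auth_handler)
with opener.open(gh_url) as response:
    print(response.getcode())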
[user]
	name = Pavan Kumar Sunkara
	email = [email protected]
	username = pksunkara
[init]
	defaultBranch = master
[core]
	editor = nvim
	whitespace = fix,-indent-with-non-tab,trailing-space,cr-at-eol
	pager = delta
The MIT License (MIT)

Copyright (c) 2016 Stuart Powers

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
#!/usr/bin/python
__doc__ = """Tiny HTTP Proxy.

This module implements GET, HEAD, POST, PUT and DELETE methods
on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT
method is also implemented experimentally, but has not been
tested yet.

Any help will be greatly appreciated.  SUZUKI Hisao
"""
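As a rough illustration of what such a proxy does, here is a minimal Python 3 sketch (not SUZUKI Hisao's implementation) that handles only GET and assumes the client sends absolute URLs, as HTTP clients do when talking to a proxy; the localhost:8080 bind address is arbitrary.

#!/usr/bin/env python3
# Minimal proxy sketch: forward GET requests and relay the upstream response.
from http.server import BaseHTTPRequestHandler, HTTPServer
import urllib.request

class ProxyHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        # self.path holds the absolute URL when the client talks to a proxy
        with urllib.request.urlopen(self.path) as upstream:
            body = upstream.read()
            self.send_response(upstream.getcode())
            self.send_header('Content-Length', str(len(body)))
            self.end_headers()
            self.wfile.write(body)

if __name__ == '__main__':
    HTTPServer(('localhost', 8080), ProxyHandler).serve_forever()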
#author : osman ehmad
import urllib2
page = urllib2.urlopen('http://www.javascript-coder.com/files/window-popup/javascript-window-open-example1.html')
source = page.read()
# SCRIPT or script will depend upon pages you want crawled
# or both can easily be incorporated
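If the goal is to pick up script tags regardless of case, one option on Python 3 is the standard-library html.parser, which lower-cases tag names before handing them to the callback; the sketch below is only an illustration, and collecting the src attribute (rather than inline script bodies) is an assumption. The URL is the same example page as above.

#!/usr/bin/env python3
# Sketch: collect <script> tags case-insensitively with the standard-library parser.
from html.parser import HTMLParser
import urllib.request

class ScriptCollector(HTMLParser):
    def __init__(self):
        super().__init__()
        self.script_srcs = []

    def handle_starttag(self, tag, attrs):
        # HTMLParser lower-cases tag names, so SCRIPT and script both match here
        if tag == 'script':
            # inline scripts have no src attribute, so this records None for them
            self.script_srcs.append(dict(attrs).get('src'))

url = 'http://www.javascript-coder.com/files/window-popup/javascript-window-open-example1.html'
with urllib.request.urlopen(url) as page:
    collector = ScriptCollector()
    collector.feed(page.read().decode('utf-8', errors='replace'))
print(collector.script_srcs)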
# Discard local changes and reset the branch to match upstream/master exactly
git fetch upstream
git reset --hard upstream/master
#!/usr/bin/env python
"""
Very simple HTTP server in python (Updated for Python 3.7)

Usage:
    ./dummy-web-server.py -h
    ./dummy-web-server.py -l localhost -p 8000

Send a GET request:
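The body of that script is not shown here, but a minimal sketch of a server matching the docstring (assuming -l and -p map to the listen address and port, as the usage lines suggest) could look like this:

#!/usr/bin/env python3
# Minimal sketch of the server described above: answer every GET with a short message.
import argparse
from http.server import BaseHTTPRequestHandler, HTTPServer

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        body = b'hi!'
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain')
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Very simple HTTP server')
    parser.add_argument('-l', '--listen', default='localhost', help='address to listen on')
    parser.add_argument('-p', '--port', type=int, default=8000, help='port to listen on')
    args = parser.parse_args()
    HTTPServer((args.listen, args.port), Handler).serve_forever()

With the sketch running on the defaults, curl http://localhost:8000 returns the placeholder response.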