Created April 12, 2013 14:16
python threading
import threading
import datetime


class ThreadClass(threading.Thread):
    def run(self):
        # report this thread's name and the current time
        now = datetime.datetime.now()
        print "%s says Hello World at time: %s" % (self.getName(), now)


# start two threads; each one runs ThreadClass.run()
for i in range(2):
    t = ThreadClass()
    t.start()
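The example above is Python 2 code (print statement, Thread.getName()). For reference, a minimal sketch of the same example in Python 3 syntax, where print is a function and the thread name is exposed as the name attribute:

    import threading
    import datetime


    class ThreadClass(threading.Thread):
        def run(self):
            # each thread prints its own name and the current time
            now = datetime.datetime.now()
            print("%s says Hello World at time: %s" % (self.name, now))


    for i in range(2):
        t = ThreadClass()
        t.start()

The next listing combines threads with Queue.Queue so a pool of download workers can hand pages off to a pool of parsing workers.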
import Queue
import threading
import urllib2
import time

from BeautifulSoup import BeautifulSoup

hosts = ["http://yahoo.com", "http://google.com", "http://amazon.com",
         "http://ibm.com", "http://apple.com"]

queue = Queue.Queue()
out_queue = Queue.Queue()


class ThreadUrl(threading.Thread):
    """Threaded Url Grab"""
    def __init__(self, queue, out_queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.out_queue = out_queue

    def run(self):
        while True:
            # grabs host from queue
            host = self.queue.get()

            # fetches the host and reads the whole page
            url = urllib2.urlopen(host)
            chunk = url.read()

            # place chunk into out queue
            self.out_queue.put(chunk)

            # signals to queue job is done
            self.queue.task_done()


class DatamineThread(threading.Thread):
    """Threaded page parse"""
    def __init__(self, out_queue):
        threading.Thread.__init__(self)
        self.out_queue = out_queue

    def run(self):
        while True:
            # grabs a downloaded chunk from the out queue
            chunk = self.out_queue.get()

            # parse the chunk and print the page titles
            soup = BeautifulSoup(chunk)
            print soup.findAll(['title'])

            # signals to queue job is done
            self.out_queue.task_done()


start = time.time()


def main():
    # spawn a pool of fetch threads, and pass them the queue instances
    for i in range(5):
        t = ThreadUrl(queue, out_queue)
        t.setDaemon(True)
        t.start()

    # populate queue with data
    for host in hosts:
        queue.put(host)

    # spawn a pool of parser threads
    for i in range(5):
        dt = DatamineThread(out_queue)
        dt.setDaemon(True)
        dt.start()

    # wait on both queues until everything has been processed
    queue.join()
    out_queue.join()


main()
print "Elapsed Time: %s" % (time.time() - start)
import urllib2
import time

hosts = ["http://yahoo.com", "http://google.com", "http://amazon.com",
         "http://ibm.com", "http://apple.com"]

start = time.time()

# grabs urls of hosts and prints first 1024 bytes of page
for host in hosts:
    url = urllib2.urlopen(host)
    print url.read(1024)

print "Elapsed Time: %s" % (time.time() - start)
#!/usr/bin/env python
import Queue
import threading
import urllib2
import time

hosts = ["http://yahoo.com", "http://google.com", "http://amazon.com",
         "http://ibm.com", "http://apple.com"]

queue = Queue.Queue()


class ThreadUrl(threading.Thread):
    """Threaded Url Grab"""
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue

    def run(self):
        while True:
            # grabs host from queue
            host = self.queue.get()

            # grabs urls of hosts and prints first 1024 bytes of page
            url = urllib2.urlopen(host)
            print url.read(1024)

            # signals to queue job is done
            self.queue.task_done()


start = time.time()


def main():
    # spawn a pool of threads, and pass them queue instance
    for i in range(5):
        t = ThreadUrl(queue)
        t.setDaemon(True)
        t.start()

    # populate queue with data
    for host in hosts:
        queue.put(host)

    # wait on the queue until everything has been processed
    queue.join()


main()
print "Elapsed Time: %s" % (time.time() - start)