Created December 11, 2014 10:38
Python 3 asynchronous HTTP request with aiohttp and asyncio
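The script below fetches a list of URLs concurrently using the generator-based coroutine style (yield from) that asyncio used before Python 3.5 introduced async/await; each request and each body read is capped at 10 seconds.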
import asyncio

import aiohttp


@asyncio.coroutine
def _crawl_url(url):
    """Fetch one URL and return the response, or None on failure."""
    try:
        # Time out after 10 seconds on both the request and the body read.
        resp = yield from asyncio.wait_for(aiohttp.request('GET', url, allow_redirects=True), 10)
        """:type resp: aiohttp.client.ClientResponse"""
        # Cache the decoded body on the response object; this shadows the
        # resp.text() coroutine, so it must only be done once.
        resp.text = yield from asyncio.wait_for(resp.text(), 10)
        return resp
    except Exception:
        print('failed to crawl url:', url)
        return None


def crawl_urls(urls):
    # Schedule one task per URL so the requests run concurrently,
    # then block until every task has finished.
    coros = [asyncio.Task(_crawl_url(url)) for url in urls]
    future = asyncio.gather(*coros)
    asyncio.get_event_loop().run_until_complete(future)
    return future.result()


if __name__ == '__main__':
    urls = [
        'http://httpbin.org/get?q=1',
        'http://httpbin.org/get?q=2',
        'http://httpbin.org/get?q=3',
        'http://httpbin.org/get?q=4',
        'http://httpbin.org/get?q=5',
    ]
    # Failed crawls come back as None, so filter them out before printing.
    print([(resp.status, resp.text) for resp in crawl_urls(urls) if resp is not None])
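For reference, on newer interpreters the same crawl is usually written with async/await and a single shared ClientSession. A minimal sketch, assuming Python 3.7+ and aiohttp 3.x (not the 2014 aiohttp API the gist targets):

import asyncio

import aiohttp


async def crawl_url(session, url):
    # The shared ClientSession reuses connections across all requests.
    try:
        async with session.get(url, allow_redirects=True) as resp:
            return resp.status, await resp.text()
    except Exception:
        print('failed to crawl url:', url)
        return None


async def crawl_urls(urls):
    timeout = aiohttp.ClientTimeout(total=10)  # default timeout for each request
    async with aiohttp.ClientSession(timeout=timeout) as session:
        return await asyncio.gather(*(crawl_url(session, url) for url in urls))


if __name__ == '__main__':
    urls = ['http://httpbin.org/get?q=%d' % i for i in range(1, 6)]
    print(asyncio.run(crawl_urls(urls)))

Creating one session for the whole batch, rather than one per request, is what lets aiohttp pool and reuse TCP connections.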