Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Select an option

  • Save readikus/0e94faa7f2ea11fa1e3be2e771d478af to your computer and use it in GitHub Desktop.

Select an option

Save readikus/0e94faa7f2ea11fa1e3be2e771d478af to your computer and use it in GitHub Desktop.
'''
Experimenting with asyncio.TaskGroup to load a collection of URLs. This provides a similar approach to
Promise.all in JavaScript.
'''
import asyncio
import urllib
import urllib.request

import aiohttp
from codetiming import Timer
async def main():
    """Run two tasks concurrently inside an asyncio.TaskGroup (docs example).

    NOTE(review): `some_coro` and `another_coro` are not defined anywhere in
    this file, so calling this function would raise NameError. It is also
    shadowed by the second `main` defined further down, so it never runs.
    Presumably left in as illustrative example code copied from the asyncio
    documentation — confirm and remove.
    """
    async with asyncio.TaskGroup() as tg:
        task1 = tg.create_task(some_coro(...))
        task2 = tg.create_task(another_coro(...))
    # Reached only once the TaskGroup context has awaited both tasks.
    print("Both tasks have completed now.")
async def get_url_async(url):
    """Download the body of ``url`` with aiohttp, printing the elapsed time.

    The response body is awaited but discarded; only the timing side effect
    matters to the caller.
    """
    async with aiohttp.ClientSession() as client:
        download_timer = Timer(
            text=f"(Async) Downloading {url} elapsed time: {{:.1f}}"
        )
        with download_timer:
            async with client.get(url) as resp:
                await resp.text()
async def get_all_urls_async(urls):
    """Fetch every URL in ``urls`` concurrently.

    Returns once the TaskGroup has awaited all downloads; any failure
    cancels the remaining tasks per TaskGroup semantics.
    """
    async with asyncio.TaskGroup() as tg:
        for target in urls:
            tg.create_task(get_url_async(target))
def get_url_sync(url):
    """Download the body of ``url`` synchronously, printing the elapsed time.

    The body is read and discarded; only the timing side effect matters.

    Fix: the original called ``urllib.request.urlopen`` without ever closing
    the response, leaking the connection (especially if ``read()`` raised).
    The response object supports the context-manager protocol, so ``with``
    guarantees it is closed on every path.
    """
    with Timer(text=f"(sync) Downloading {url} elapsed time: {{:.1f}}"):
        with urllib.request.urlopen(url) as response:
            response.read()
def get_all_urls_sync(urls):
    """Download each URL in ``urls`` sequentially, one after another."""
    for target in urls:
        get_url_sync(target)
async def main():
    """Time the async (concurrent) vs sync (sequential) download of a
    fixed list of well-known sites, printing a total for each approach."""
    sites = [
        "http://google.com",
        "http://yahoo.com",
        "http://linkedin.com",
        "http://apple.com",
        "http://microsoft.com",
        "http://facebook.com",
        "http://twitter.com",
    ]
    # Concurrent pass first, then the sequential baseline for comparison.
    with Timer(text="\n(async) Total elapsed time: {:.1f}"):
        await get_all_urls_async(sites)
    with Timer(text="\n(sync) Total elapsed time: {:.1f}"):
        get_all_urls_sync(sites)
# Entry point: run the async-vs-sync timing comparison when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment