Skip to content

Instantly share code, notes, and snippets.

@johnidm
Created March 31, 2025 15:54
Show Gist options
  • Save johnidm/cee1d9b8370065a640ed9f67d808c743 to your computer and use it in GitHub Desktop.
Save johnidm/cee1d9b8370065a640ed9f67d808c743 to your computer and use it in GitHub Desktop.
Making multiple HTTP requests using Python

Synchronous

import requests
import time


# Ten well-known sites used as request targets for the timing demo.
_SITES = (
    "google", "microsoft", "apple", "amazon", "facebook",
    "ibm", "oracle", "intel", "nvidia", "samsung",
)
urls = [f"http://www.{site}.com" for site in _SITES]
# Response bodies are collected here by run_tasks().
results = []


def run_tasks():
    """Fetch every URL in `urls` sequentially, appending each body to `results`.

    One blocking GET at a time, so total wall time is roughly the sum of
    the individual request latencies — this is the baseline the other
    snippets are compared against.
    """
    for url in urls:
        # timeout= prevents one unresponsive host from hanging the whole
        # run forever (requests has no default timeout).
        # verify=False mirrors the original demo but disables TLS
        # certificate checking — do not use against untrusted hosts.
        response = requests.get(url, verify=False, timeout=10)
        results.append(response.text)


# Time the whole batch with a monotonic clock.
print("Timer started...")
t0 = time.perf_counter()
run_tasks()
elapsed = time.perf_counter() - t0
print("It took {} seconds to make {} API calls".format(elapsed, len(urls)))

Synchronous Multithreading

import time
import requests

# Ten well-known sites used as request targets for the timing demo.
_SITES = (
    "google", "microsoft", "apple", "amazon", "facebook",
    "ibm", "oracle", "intel", "nvidia", "samsung",
)
urls = [f"http://www.{site}.com" for site in _SITES]
# Response bodies are collected here by run_tasks().
results = []


def run_tasks():
    """Fetch all URLs concurrently on a thread pool; extend `results` with bodies.

    `executor.map` yields results in input order, so `results` lines up
    with `urls` even though the requests complete out of order.
    """
    import concurrent.futures

    def fetch_url(url):
        # timeout= keeps one stalled host from blocking its worker thread
        # forever (requests has no default timeout). verify=False mirrors
        # the original demo but disables TLS certificate checking.
        response = requests.get(url, verify=False, timeout=10)
        return response.text

    # One worker per URL so every request runs in parallel.
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        results.extend(executor.map(fetch_url, urls))


# Time the whole batch with a monotonic clock.
print("Timer started...")
t0 = time.perf_counter()
run_tasks()
elapsed = time.perf_counter() - t0
print("It took {} seconds to make {} API calls".format(elapsed, len(urls)))

Asynchronous

import aiohttp
import asyncio
import time


# Ten well-known sites used as request targets for the timing demo.
_SITES = (
    "google", "microsoft", "apple", "amazon", "facebook",
    "ibm", "oracle", "intel", "nvidia", "samsung",
)
urls = [f"http://www.{site}.com" for site in _SITES]
# Response bodies are collected here by run_tasks().
results = []


async def run_tasks():
    """Fetch every URL sequentially over one shared aiohttp session.

    Appends each response body (str) to the module-level `results` list.
    Requests are awaited one at a time, so despite being async this runs
    no faster than the synchronous version — it is the baseline for the
    gather-based snippet below.
    """
    async with aiohttp.ClientSession() as session:
        for url in urls:
            # `async with` releases the connection back to the pool when
            # the body has been read. ssl=False mirrors the original demo
            # but disables certificate checking.
            async with session.get(url, ssl=False) as response:
                # Bug fix: aiohttp's `.text()` is a coroutine method — the
                # original appended the unawaited bound method object
                # instead of the body string.
                results.append(await response.text())


# Time the whole batch with a monotonic clock.
print("Timer started...")
t0 = time.perf_counter()
asyncio.run(run_tasks())
elapsed = time.perf_counter() - t0
print("It took {} seconds to make {} API calls".format(elapsed, len(urls)))

Asynchronous Concurrent (asyncio.gather — no threads involved)

import aiohttp
import asyncio
import time


# Ten well-known sites used as request targets for the timing demo.
_SITES = (
    "google", "microsoft", "apple", "amazon", "facebook",
    "ibm", "oracle", "intel", "nvidia", "samsung",
)
urls = [f"http://www.{site}.com" for site in _SITES]
# Response bodies are collected here by run_tasks().
results = []


def get_tasks(session):
    """Return one un-awaited GET awaitable per URL, for `asyncio.gather`."""
    return [session.get(url, ssl=False) for url in urls]

async def run_tasks():
    """Fire all GETs concurrently via asyncio.gather; append bodies to `results`.

    `asyncio.gather` preserves input order, so `results` lines up with
    `urls`. All requests are in flight at once, so total time approaches
    the slowest single request rather than the sum.
    """
    async with aiohttp.ClientSession() as session:
        tasks = get_tasks(session)
        responses = await asyncio.gather(*tasks)
        for response in responses:
            # Bug fix: `.text()` is a coroutine method — the original
            # appended the unawaited bound method object instead of the
            # body string. Reading the body also lets aiohttp release the
            # pooled connection.
            results.append(await response.text())


# Time the whole batch with a monotonic clock.
print("Timer started...")
t0 = time.perf_counter()
asyncio.run(run_tasks())
elapsed = time.perf_counter() - t0
print("It took {} seconds to make {} API calls".format(elapsed, len(urls)))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment