import requests
import time
urls = [
    "http://www.google.com",
    "http://www.microsoft.com",
    "http://www.apple.com",
    "http://www.amazon.com",
    "http://www.facebook.com",
    "http://www.ibm.com",
    "http://www.oracle.com",
    "http://www.intel.com",
    "http://www.nvidia.com",
    "http://www.samsung.com",
]
results = []
def run_tasks():
    # Fetch each URL one at a time; every request blocks until the previous one has finished
    for url in urls:
        response = requests.get(url, verify=False)
        results.append(response.text)
print("Timer started...")
start = time.perf_counter()
run_tasks()
end = time.perf_counter()
total_time = end - start
print("It took {} seconds to make {} API calls".format(total_time, len(urls)))
Synchronous Multithreading
import concurrent.futures
import time

import requests
urls = [
    "http://www.google.com",
    "http://www.microsoft.com",
    "http://www.apple.com",
    "http://www.amazon.com",
    "http://www.facebook.com",
    "http://www.ibm.com",
    "http://www.oracle.com",
    "http://www.intel.com",
    "http://www.nvidia.com",
    "http://www.samsung.com",
]
results = []
def fetch_url(url):
    response = requests.get(url, verify=False)
    return response.text

def run_tasks():
    # Spread the requests across a pool of worker threads; map() preserves the order of urls
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        results.extend(executor.map(fetch_url, urls))
print("Timer started...")
start = time.perf_counter()
run_tasks()
end = time.perf_counter()
total_time = end - start
print("It took {} seconds to make {} API calls".format(total_time, len(urls)))
import aiohttp
import asyncio
import time
urls = [
    "http://www.google.com",
    "http://www.microsoft.com",
    "http://www.apple.com",
    "http://www.amazon.com",
    "http://www.facebook.com",
    "http://www.ibm.com",
    "http://www.oracle.com",
    "http://www.intel.com",
    "http://www.nvidia.com",
    "http://www.samsung.com",
]
results = []
async def run_tasks():
    async with aiohttp.ClientSession() as session:
        # The await inside the loop still handles one request at a time
        for url in urls:
            response = await session.get(url, ssl=False)
            results.append(await response.text())
print("Timer started...")
start = time.perf_counter()
asyncio.run(run_tasks())
end = time.perf_counter()
total_time = end - start
print("It took {} seconds to make {} API calls".format(total_time, len(urls)))
Asynchronous Multithreading
import aiohttp
import asyncio
import time
urls = [
    "http://www.google.com",
    "http://www.microsoft.com",
    "http://www.apple.com",
    "http://www.amazon.com",
    "http://www.facebook.com",
    "http://www.ibm.com",
    "http://www.oracle.com",
    "http://www.intel.com",
    "http://www.nvidia.com",
    "http://www.samsung.com",
]
results = []
def get_tasks(session):
    # Build the request coroutines without awaiting them yet
    tasks = [session.get(url, ssl=False) for url in urls]
    return tasks

async def run_tasks():
    async with aiohttp.ClientSession() as session:
        tasks = get_tasks(session)
        # gather() runs every request concurrently and returns once all of them have finished
        responses = await asyncio.gather(*tasks)
        for response in responses:
            results.append(await response.text())
print("Timer started...")
start = time.perf_counter()
asyncio.run(run_tasks())
end = time.perf_counter()
total_time = end - start
print("It took {} seconds to make {} API calls".format(total_time, len(urls)))