Created
August 1, 2017 05:21
-
-
Save dineshsprabu/0c83e81150784eafa6de6692284029c2 to your computer and use it in GitHub Desktop.
[Python][Async] Understanding Futures and ThreadPoolExecutor for Async
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from concurrent import futures | |
import requests | |
def get_data(url, timeout=30):
    """Fetch the body of *url* via an HTTP GET and return it as text.

    The sentinel value ``0`` skips the network entirely and returns the
    literal string ``"working"`` — the demos below use it as a job that
    completes without doing any I/O.

    Parameters:
        url: the URL to fetch, or ``0`` for the no-network sentinel.
        timeout: seconds to wait for the server before ``requests``
            raises. New, backward-compatible parameter: the original
            call had no timeout and could block a worker thread forever.

    Returns:
        The response body as text, or ``"working"`` for the sentinel.
    """
    if url == 0:
        return "working"
    # A GET without a timeout can hang indefinitely on a stalled server;
    # always bound network waits inside pool workers.
    return requests.get(url, timeout=timeout).text
# Demo 1: fan blocking calls out over a thread pool.
# Passing None as the pool size lets the executor choose; changing the
# size changes the order in which results complete (and thus print).
with futures.ThreadPoolExecutor(None) as pool:
    targets = ['http://www.fakeresponse.com/api/?sleep=10', 'https://httpbin.org/ip', 0]
    # Submit one future per URL.
    pending = []
    for target in targets:
        pending.append(pool.submit(get_data, target))
    # as_completed() yields each future the moment it finishes,
    # so results print in completion order, not submission order.
    for finished in futures.as_completed(pending):
        print(finished.result())
# Demo 2: block until every submitted job has finished.
with futures.ThreadPoolExecutor(None) as pool:
    slow_url = 'http://www.fakeresponse.com/api/?sleep=10'
    fast_url = 'https://httpbin.org/ip'
    # Submit the slow job first, then the fast one.
    submitted = [pool.submit(get_data, u) for u in (slow_url, fast_url)]
    # wait() blocks until the return_when condition is met —
    # here, until every future in the list has completed.
    futures.wait(submitted, timeout=None, return_when=futures.ALL_COMPLETED)
    slow_future, fast_future = submitted
    # done() reports completion without blocking.
    print(slow_future.done())
    # Results are safe to read now; nothing is still running.
    print(slow_future.result())
    print(fast_future.result())
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment