@brendancol
Last active December 19, 2015 22:49
Simple Geoprocessing Service Concurrent User Test
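This script load-tests an asynchronous ArcGIS Server geoprocessing service. Each worker picks 20 random country codes, submits a job to the service's submitJob endpoint, then polls jobs/{jobId} once a second until the status comes back as esriJobSucceeded or esriJobFailed, recording the elapsed time. run_concurrent_requests fans the workers out over a multiprocessing.Pool so several jobs hit the service at once, and a callback prints each job's status, latency, result, and errors as it completes.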
import multiprocessing
import urllib2
import time
import json
import random


def concurrent_request_worker(url):
    """Submit one geoprocessing job, poll it to completion, and time it."""
    countries = ['AL', 'AO', 'AM', 'AZ', 'BD', 'BJ', 'BO', 'BT', 'BR', 'BF', 'BU', 'KH', 'CM', 'CV', 'CF', 'TD', 'CO', 'KM', 'CG', 'CD', 'CI', 'DR', 'EC', 'EG', 'ES', 'ER', 'ET', 'GA', 'GE', 'GH', 'GU', 'GN', 'GY', 'HT', 'HN', 'IA', 'ID', 'JM', 'JO', 'KK', 'KE', 'KY', 'LS', 'LB', 'MD', 'MW', 'MV', 'ML', 'MR', 'MX', 'MB', 'MA', 'MZ', 'NM', 'NP', 'NC', 'NI', 'NG', 'PK', 'PY', 'PE', 'PH', 'RO', 'RW', 'ST', 'SN', 'SL', 'ZA', 'LK', 'SD', 'SZ', 'TZ', 'TH', 'TL', 'TG', 'TT', 'TN', 'TR', 'TM', 'UG', 'UA', 'UZ', 'VN', 'YE', 'ZM', 'ZW']

    # Each request asks for a random sample of 20 countries.
    rand_countries = ','.join(random.sample(countries, 20))

    is_executing = True
    success = None
    result = None
    errors = None

    start_timer = time.time()

    # Submit the job to the GP service's submitJob endpoint.
    resp = urllib2.urlopen(url.format(rand_countries))
    content = json.loads(resp.read())

    # The submit response carries a jobId; build the job-status URL from it.
    polling_url = url.split('submitJob')[0] + 'jobs/{}'.format(content['jobId'])

    # Poll once a second until the job succeeds or fails.
    while is_executing:
        poll = urllib2.urlopen(polling_url + '?f=json')
        poll_response = json.loads(poll.read())
        if poll_response['jobStatus'] == 'esriJobSucceeded':
            is_executing = False
            success = poll_response['jobStatus']
            result = urllib2.urlopen(polling_url + '/results/result?f=json').read()
            errors = urllib2.urlopen(polling_url + '/results/errors?f=json').read()
        elif poll_response['jobStatus'] == 'esriJobFailed':
            is_executing = False
            success = poll_response['jobStatus']
        else:
            time.sleep(1)

    latency = time.time() - start_timer
    return success, latency, url, result, errors


def concurrent_request_complete(results):
    """Callback run in the parent process as each worker finishes."""
    success, latency, url, result, errors = results
    print '%s : %.3f secs : %s\n' % (success, latency, url)
    print 'Result: {}\n'.format(result)
    print 'Errors: {}\n'.format(errors)
    print "====================================================="


def run_concurrent_requests(url, numRequests=3):
    """Fire numRequests jobs at the service concurrently and wait for them all."""
    pool = multiprocessing.Pool(numRequests)
    for _ in range(numRequests):
        arguments = (url,)
        pool.apply_async(concurrent_request_worker, arguments, callback=concurrent_request_complete)
    pool.close()
    pool.join()


if __name__ == '__main__':
    GET_NATIONAL_DATA_URL = r'http://gis101.measuredhs.com/arcgis/rest/services/SDR/GetNationalData/GPServer/getNationalData/submitJob?f=json&indicator_ids=quickstats&country_ids={}&years=&types=&spatial_format=shp'
    # Note: this constant currently points at the same getNationalData endpoint as above.
    GET_SUB_NATIONAL_DATA_URL = r'http://gis101.measuredhs.com/arcgis/rest/services/SDR/GetNationalData/GPServer/getNationalData/submitJob?f=json&indicator_ids=quickstats&country_ids={}&years=&types=&spatial_format=shp'

    run_concurrent_requests(GET_NATIONAL_DATA_URL, numRequests=6)
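The gist targets Python 2 (urllib2 and print statements). As a minimal sketch, the fetch-and-decode step the worker repeats could be written for Python 3 with the standard library's urllib.request; the fetch_json helper below is a hypothetical name, not part of the original gist.

# Minimal Python 3 sketch of the urlopen + json.loads pattern used in the worker above.
# fetch_json is a hypothetical helper, not part of the original gist.
import json
from urllib.request import urlopen

def fetch_json(url):
    # Equivalent of urllib2.urlopen(url).read() followed by json.loads(...).
    with urlopen(url) as resp:
        return json.loads(resp.read().decode('utf-8'))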