# Gist by @emilepetrone, created October 13, 2011 (gist id 1284930)
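#
# Looks up the neighborhood name for every US zip code listed in zipcodes.xls
# using the SimpleGeo Context API, then appends any zip codes not already
# present in results.xls to that workbook as (zip code, neighborhood) rows.
# Written for Python 2 with xlrd/xlwt/xlutils and the simplegeo client.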
import xlrd, xlwt
from xlutils.copy import copy
import time
from simplegeo import Client, json, APIError
#from multiprocessing.pool import ThreadPool as Pool
from multiprocessing import Pool
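
# OAuth key and secret for the SimpleGeo Context API client.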
client = Client('SbAc87cM7EK2qCvmjymQtXU66MdXGNPr','CNmJaCEGGY3V7Tq3NEJsvNJCdJZjvgfN')
neighborhoods = {}
def f(z):
    """Look up the neighborhood for a single zip code via the SimpleGeo Context API."""
    z = str(z)
    error = ""
    n = []
    try:
        # z = str(10001)
        results = client.context.get_context_by_address(z)
        for b in results["features"]:
            if b["classifiers"][0]["category"] == 'Neighborhood':
                n = b["name"]
    except APIError:
        # On an API error, return an empty string so the result gets filtered out.
        return (z, error)
    # print neighborhoods
    # print "%s hoods=%s." % (z, len(neighborhoods))
    return (z, n)
if __name__ == '__main__':
    # Collect the unique, zero-padded zip codes from the first sheet of zipcodes.xls.
    book = xlrd.open_workbook("zipcodes.xls")
    sheet = book.sheet_by_index(0)
    rows = sheet.nrows
    row = 1
    zipcodes = set()
    # while row <= 5000:
    while row <= (rows - 1):
        code = int(sheet.cell_value(row, 0))
        z = str(code).zfill(5)
        zipcodes.add(z)
        row += 1
    print "Found %s codes" % len(zipcodes)
    # Open results.xls and record the zip codes that are already present, so they
    # are not written a second time. A writable copy of the workbook is kept via
    # xlutils.copy so new rows can be appended below the existing ones.
    w_sheet_dict = {}
    rb = xlrd.open_workbook("results.xls")
    r_sheet = rb.sheet_by_index(0)
    wb = copy(rb)
    w_sheet = wb.get_sheet(0)
    rows2 = r_sheet.nrows
    r = 1
    while r < rows2:
        w_sheet_dict[r_sheet.cell_value(r, 0)] = [r_sheet.cell_value(r, 1)]
        r += 1
    # Fan the SimpleGeo lookups out across a pool of 16 worker processes.
    start = time.time()
    pool = Pool(16)
    result = pool.map(f, zipcodes)
    # Drop zip codes that came back with no neighborhood (or hit an API error).
    result = filter(lambda x: len(x[1]) > 0, result)
    print result
    # Append the new (zip code, neighborhood) rows after the existing rows.
    # Rows in xlwt are 0-indexed, so nrows is already the first empty row.
    for x in result:
        if x[0] not in w_sheet_dict:
            w_sheet.write(rows2, 0, str(x[0]))
            w_sheet.write(rows2, 1, str(x[1]))
            rows2 += 1
    wb.save("results.xls")
    print len(result)
    print 'Duration %s' % (time.time() - start,)