Skip to content

Instantly share code, notes, and snippets.

@guyromm
Last active May 1, 2016 11:18
Show Gist options
  • Select an option

  • Save guyromm/3289ab7b1c1452f1a608d89adbd0c24a to your computer and use it in GitHub Desktop.

Select an option

Save guyromm/3289ab7b1c1452f1a608d89adbd0c24a to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
# Report hours tracked on screenshotmonitor.com per employee over a date range.
# NOTE: fixed the mangled first line -- the future-import must be spelled
# `from __future__ import division` and live on its own line.
from __future__ import division  # true division for the hours math in __main__

import sys
import datetime
import urllib,urllib2
import json
from collections import defaultdict

# Usage: script.py FROM TO TOKEN   (dates are YYYY-MM-DD)
fr = sys.argv[1]    # example: 2016-03-01
to = sys.argv[2]    # example: 2016-03-31
token = sys.argv[3] # SECRET TOKEN from https://screenshotmonitor.com/account

# Headers attached to every API request: token auth + JSON content type.
headers = {'X-SSM-Token': token,
           'Content-type': 'application/json'}
def prepare_request(path, data=''):
    """Build a urllib2.Request for the given API endpoint.

    path -- endpoint name appended to the API base URL
    data -- request body; the empty-string default still makes urllib2
            issue a POST (data is not None)
    Returns the Request with all module-level `headers` attached.
    """
    url = 'https://screenshotmonitor.com/api/v2/' + path
    request = urllib2.Request(url, data)
    for name in headers:
        request.add_header(name, headers[name])
    return request
def get_common_data():
    """Fetch account-wide data (companies and their employments) as a dict."""
    response = urllib2.urlopen(prepare_request('GetCommonData'))
    return json.loads(response.read())
def get_activities(fr, to, employment_ids):
    """Fetch raw activity records for every employment in the date range.

    fr, to         -- 'YYYY-MM-DD' date strings (inclusive range endpoints)
    employment_ids -- iterable of employment ids to query
    Returns the decoded JSON response (a list of activity dicts).
    """
    import time  # local import so the module-level import block stays untouched

    date_from = datetime.datetime.strptime(fr, '%Y-%m-%d')
    date_to = datetime.datetime.strptime(to, '%Y-%m-%d')

    def _epoch(dt):
        # strftime('%s') is a non-portable glibc extension; time.mktime
        # produces the same local-time epoch value on every platform.
        return str(int(time.mktime(dt.timetuple())))

    pst = [{'employmentId': employment_id,
            'from': _epoch(date_from),
            'to': _epoch(date_to)} for employment_id in employment_ids]
    pste = json.dumps(pst)
    req = prepare_request('GetActivities', pste)
    resp = urllib2.urlopen(req)
    return json.loads(resp.read())
def aggregate(act, common):
    """Sum tracked time per employment.

    act    -- list of activity dicts with 'from'/'to' epoch seconds and
              'employmentId'
    common -- GetCommonData payload; only common['companies'][*]['employments']
              is read
    Returns (totals, all_employments): totals maps employmentId -> timedelta,
    all_employments maps employmentId -> employee name.
    """
    # Flatten every company's employment list into one id -> name lookup.
    all_employments = {}
    for company in common['companies']:
        for employment in company['employments']:
            all_employments[employment['id']] = employment['name']

    totals = defaultdict(datetime.timedelta)
    for entry in act:
        started = datetime.datetime.fromtimestamp(entry['from'])
        finished = datetime.datetime.fromtimestamp(entry['to'])
        totals[entry['employmentId']] += finished - started
    return totals, all_employments
if __name__=='__main__':
    # Two modes:
    #  * "stdin" anywhere in argv: replay a previously captured JSON dump
    #    (the {'common_data':..., 'activities':...} object printed below)
    #    from standard input -- avoids hitting the API again.
    #  * otherwise: fetch live data for the fr/to range given on the
    #    command line and echo the raw payload so it can be replayed later.
    if 'stdin' in sys.argv:
        d = json.loads(sys.stdin.read())
        act,common = (d['activities'],d['common_data'])
    else:
        common = get_common_data()
        # Every employment id across all companies in the account.
        employment_ids = [employment['id'] for company in common['companies'] for employment in company['employments']]
        act = get_activities(fr,to,employment_ids)
        # Dump raw data so a later run can re-aggregate via the stdin mode.
        print json.dumps({'common_data':common,
        'activities':act})
    agg,all_employments = aggregate(act,common)
    # One line per employee: name and total tracked hours (timedelta -> hours).
    for eid,l in agg.items():
        print all_employments[eid],float((l.days*86400)+l.seconds)/60/60
#!/usr/bin/env python
# Report hours tracked on screenshotmonitor.com per employee over a date range.
# NOTE: fixed the mangled first line -- the future-import must be spelled
# `from __future__ import division` and live on its own line.
from __future__ import division  # true division for the hours math in __main__

import sys
import datetime
import urllib,urllib2
import json
from collections import defaultdict

# Usage: script.py FROM TO TOKEN   (dates are YYYY-MM-DD)
fr = sys.argv[1]    # example: 2016-03-01
to = sys.argv[2]    # example: 2016-03-31
token = sys.argv[3] # SECRET TOKEN from https://screenshotmonitor.com/account

# Headers attached to every API request: token auth + JSON content type.
headers = {'X-SSM-Token': token,
           'Content-type': 'application/json'}
def prepare_request(path, data=''):
    """Build a urllib2.Request for the given API endpoint.

    path -- endpoint name appended to the API base URL
    data -- request body; the empty-string default still makes urllib2
            issue a POST (data is not None)
    Returns the Request with all module-level `headers` attached.
    """
    url = 'https://screenshotmonitor.com/api/v2/' + path
    request = urllib2.Request(url, data)
    for name in headers:
        request.add_header(name, headers[name])
    return request
def get_common_data():
    """Fetch account-wide data (companies and their employments) as a dict."""
    response = urllib2.urlopen(prepare_request('GetCommonData'))
    return json.loads(response.read())
def get_activities(fr, to, employment_ids):
    """Fetch raw activity records for every employment in the date range.

    fr, to         -- 'YYYY-MM-DD' date strings (inclusive range endpoints)
    employment_ids -- iterable of employment ids to query
    Returns the decoded JSON response (a list of activity dicts).
    """
    import time  # local import so the module-level import block stays untouched

    date_from = datetime.datetime.strptime(fr, '%Y-%m-%d')
    date_to = datetime.datetime.strptime(to, '%Y-%m-%d')

    def _epoch(dt):
        # strftime('%s') is a non-portable glibc extension; time.mktime
        # produces the same local-time epoch value on every platform.
        return str(int(time.mktime(dt.timetuple())))

    pst = [{'employmentId': employment_id,
            'from': _epoch(date_from),
            'to': _epoch(date_to)} for employment_id in employment_ids]
    pste = json.dumps(pst)
    req = prepare_request('GetActivities', pste)
    resp = urllib2.urlopen(req)
    return json.loads(resp.read())
def aggregate(act, common):
    """Sum tracked time per employment.

    act    -- list of activity dicts with 'from'/'to' epoch seconds and
              'employmentId'
    common -- GetCommonData payload; only common['companies'][*]['employments']
              is read
    Returns (totals, all_employments): totals maps employmentId -> timedelta,
    all_employments maps employmentId -> employee name.
    """
    # Flatten every company's employment list into one id -> name lookup.
    all_employments = {}
    for company in common['companies']:
        for employment in company['employments']:
            all_employments[employment['id']] = employment['name']

    totals = defaultdict(datetime.timedelta)
    for entry in act:
        started = datetime.datetime.fromtimestamp(entry['from'])
        finished = datetime.datetime.fromtimestamp(entry['to'])
        totals[entry['employmentId']] += finished - started
    return totals, all_employments
if __name__=='__main__':
    # Two modes:
    #  * "stdin" anywhere in argv: replay a previously captured JSON dump
    #    (the {'common_data':..., 'activities':...} object printed below)
    #    from standard input -- avoids hitting the API again.
    #  * otherwise: fetch live data for the fr/to range given on the
    #    command line and echo the raw payload so it can be replayed later.
    if 'stdin' in sys.argv:
        d = json.loads(sys.stdin.read())
        act,common = (d['activities'],d['common_data'])
    else:
        common = get_common_data()
        # Every employment id across all companies in the account.
        employment_ids = [employment['id'] for company in common['companies'] for employment in company['employments']]
        act = get_activities(fr,to,employment_ids)
        # Dump raw data so a later run can re-aggregate via the stdin mode.
        print json.dumps({'common_data':common,
        'activities':act})
    agg,all_employments = aggregate(act,common)
    # One line per employee: name and total tracked hours (timedelta -> hours).
    for eid,l in agg.items():
        print all_employments[eid],float((l.days*86400)+l.seconds)/60/60
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment