Created
January 4, 2014 22:36
-
-
Save askmeegs/8261675 to your computer and use it in GitHub Desktop.
search handler for TVNewsPulse
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
class SearchHandler(webapp2.RequestHandler):
    """AJAX search endpoint for TVNewsPulse.

    On POST: records the search term (under the user's id, or as
    "Anonymous"), fetches a cached ``Search`` entity for the term or
    builds one from the TV News data via the processData.py helpers,
    reformats the stored frequency dictionaries into comma-separated
    strings for the Google Charts front end, and writes everything back
    as a single JSON object.
    """

    def post(self):  # not the same post as jquery $.post
        searchterm = self.request.get("searchterm")
        modifiedToken = cleanToken(searchterm)
        user = users.get_current_user()
        s = ""        # HTML <option> list of the user's saved searches
        results = ""  # always empty; kept so the JSON contract is unchanged

        if user:
            # Key is user_id + term so repeating a search overwrites the
            # earlier SaveSearches entity instead of duplicating it.
            key = str(user.user_id() + searchterm)
            obj = SaveSearches(key_name=key, userid=user.user_id(),
                               searches=searchterm)
            obj.put()
            # Build the saved-searches dropdown, newest first.
            # BUGFIX: the original looped on search.count() and indexed
            # search[i], issuing a fresh datastore query per iteration;
            # iterating the query once fetches the results in one pass.
            query = db.Query(SaveSearches).filter(
                'userid = ', user.user_id()).order("-dateAdded")
            option_parts = []
            for saved in query:
                option_parts.append('<option>' + saved.searches + '</option>')
            s = ''.join(option_parts)
        else:
            # Anonymous searches are logged without a key (no overwrite).
            obj = SaveSearches(userid="Anonymous", searches=searchterm)
            obj.put()

        # Query to see if this search is already cached in the datastore.
        q = Search.all()
        q.filter("token =", searchterm)
        search = q.get()
        # (BUGFIX: removed a leftover Python-2 `print` debug statement
        # that wrote the query result to stdout on every request.)
        if search is None:
            # Not cached: pull and summarize the raw TV News data
            # (helper functions live in processData.py).
            data = compileData(modifiedToken)
            numResults = len(data)
            parsedDates = separateDates(data)               # dates from the JSON data
            dateFreq = str(countFrequency(parsedDates))     # date-frequency dict, as repr()
            networkFreq = str(classifyNetworks(data))       # network-frequency dict, as repr()
            search = Search(token=searchterm,
                            numberResults=numResults,
                            dateFrequency=dateFreq,
                            networkFrequency=networkFreq)
            search.put()

        # Either newly created or loaded from the datastore.
        numResults = search.numberResults

        # The frequency fields were stored via str() of Python containers;
        # the helpers strip the repr() punctuation and re-order the values
        # into the flat CSV strings the two Google Charts expect.
        processedString = self._format_timeline(search.dateFrequency)
        processedString2 = self._format_piechart(search.networkFrequency)

        # If a user is logged in, pull any notes saved for this term.
        displayNotes = ""
        if user:
            # Same user_id + term key scheme, so notes are overwrite-able.
            key = str(user.user_id() + searchterm)
            temp = SaveNotes.get_by_key_name(key)
            if temp is not None:
                displayNotes = temp.notes

        # NOTE(review): computed but never used or returned in the original;
        # kept in case create_*_url has session side effects — confirm and
        # either return them to the client or delete.
        login_url = users.create_login_url(self.request.path)
        logout_url = users.create_logout_url(self.request.path)

        responseDict = {
            'token': search.token,
            'numResults': numResults,
            'timeLineDisplay': processedString,
            'pieChartDisplay': processedString2,
            'information': results,
            'scrollList': s,
            'currentTerm': search.token,
            'notes': displayNotes
        }
        # Dump all data into a dictionary and write it as JSON.
        self.response.write(json.dumps(responseDict))

    def _format_timeline(self, dateFrequency):
        """Flatten the stored ``str(OrderedDict([...]))`` date-frequency
        field into a trailing-comma CSV string for the Google Annotated
        Time Line. Each stored 4-tuple (a, b, c, d) is emitted re-ordered
        as ``c,a,b,d,`` — presumably year, month, day, frequency
        (TODO confirm against countFrequency's key format)."""
        values = (dateFrequency.replace("OrderedDict", "")
                  .replace("), (", ",").replace("([(", "")
                  .replace(")])", "").replace("'", "")
                  .replace(" ", "").split(","))
        parts = []
        i = 0
        # BUGFIX: accumulate pieces and join once instead of the original
        # quadratic `s = s + ...` concatenation loop.
        while i + 3 < len(values):
            parts.append(values[i + 2] + "," + values[i] + "," +
                         values[i + 1] + "," + values[i + 3] + ",")
            i += 4
        return ''.join(parts)

    def _format_piechart(self, networkFrequency):
        """Flatten the stored ``str([( ... )])`` network-frequency field
        into a trailing-comma CSV of alternating label,count pairs for
        the Google Pie Chart."""
        values = (networkFrequency.replace("), (", ",")
                  .replace("[(", "").replace(")]", "")
                  .replace("'", "").replace(" ", "").split(","))
        parts = []
        i = 0
        while i + 1 < len(values):
            parts.append(values[i] + "," + values[i + 1] + ",")
            i += 2
        return ''.join(parts)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment