# This script uses RCurl and RJSONIO to download data from Google's Geocoding API to get the latitude, longitude, location type, and formatted address.
library(RCurl)
library(RJSONIO)
library(plyr)
/***********
 * Collects the reporting results from all accounts
 * and generates a nicely formatted email. If there
 * are errors for an account, it includes those
 * in the email as well, since an error in one account
 * won't stop the entire script.
 ***********/
// NOTE(review): fragment — the body of generateReport is truncated here; only
// its opening lines are visible, followed by unrelated script-level constants
// (ENTITY, PAUSE_PREFIX, ENABLE_PREFIX) that appear to belong to a separate
// pause/enable-by-label script. Do not treat this as a complete function.
| function generateReport(results) { | |
// Recipient list for the report email; replace with real addresses.
| var NOTIFY = ['your_email@example.com']; | |
// Running counter — presumably totals deletions across accounts; the code
// that increments it is not visible here. TODO confirm against full script.
| var total_deleted = 0; |
| // Based on Russell Savage's script: http://goo.gl/Y9jEcG | |
// Entity type the pause/enable script operates on.
| var ENTITY = 'Ad Group'; // Change to Ad or Keyword or Campaign | |
// Label-name prefixes the script matches to decide when to pause/enable.
| var PAUSE_PREFIX = "Pause on "; | |
| var ENABLE_PREFIX = "Enable on "; | |
/**
 * Entry point for the MCC-level script: selects up to 50 child accounts
 * and runs processAccount() on each in parallel, invoking allFinished()
 * once every account has completed.
 */
function main() {
  var selector = MccApp.accounts().withLimit(50);
  selector.executeInParallel('processAccount', 'allFinished');
}
/**
 *
 * AdWords Script for checking the contents of landing pages.
 * Goes to the final URL of keywords or ads, then searches the source code for
 * user-defined strings.
 *
 * Version: 1.0
 * Google AdWords Script maintained by brainlabsdigital.com
 *
 **/
| from matplotlib import use | |
| from pylab import * | |
| from scipy.stats import beta, norm, uniform | |
| from random import random | |
| from numpy import * | |
| import numpy as np | |
| import os | |
| # Input data |
// NOTE(review): fragment — this function is truncated (no closing brace in
// view); only its setup is shown. All assignments below omit var, so every
// one of these names leaks as a global — TODO confirm intent before fixing.
| function bigQueryRun() { | |
| projectId = "xxx"; // Replace xxx with your project id | |
| datasetId = "xxx"; // Replace xxx with your dataset id | |
| tableId = "xxx"; // Replace xxx with your | |
// Despite the name, "yesterday" is set 29 days back, then reformatted into a
// 'yyyy-MM-dd' UTC string; note the var on the third line re-declares the
// same name already assigned above (hoisting makes the earlier writes hit
// the local, not a global — subtle and worth cleaning up).
| yesterday = new Date(); | |
| yesterday.setDate(yesterday.getDate() - 29); | |
| var yesterday = Utilities.formatDate(yesterday, 'UTC', 'yyyy-MM-dd'); | |
// Fetches up to 250 CallRail calls from 2015-01-01 through the computed
// date, authorized via a hard-coded token header (keep the token out of
// source control in real deployments).
| options = {"headers": {"authorization": "Token token=\"xxx\""}}; // Replace xxx with your API token | |
| response = UrlFetchApp.fetch('https://api.callrail.com/v1/calls.json?start_date=2015-01-01&end_date='+yesterday+'&per_page=250',options); | |
| pages = JSON.parse(response.getContentText()); |
// NOTE(review): fragment — truncated mid object literal; only the table
// reference portion of the BigQuery table definition is visible. The schema,
// the BigQuery.Tables.insert call, and the closing braces are not in view.
| function createBigQueryTable() { | |
// projectId/datasetId are assigned without var (implicit globals), while
// tableId uses var — inconsistent; presumably all three were meant to be
// placeholders replaced by the user.
| projectId = "xxx"; | |
| datasetId = "xxx"; | |
| var tableId = 'xxx'; | |
// Start of the table resource object passed to the BigQuery API.
| var table = { | |
| tableReference: { | |
| projectId: projectId, | |
| datasetId: datasetId, | |
| tableId: tableId | |
| }, |
/**
 * Walks three parallel arrays in lockstep and concatenates one element from
 * each per line.
 *
 * Fix: `output` was assigned without `var`, making it an implicit global —
 * a ReferenceError under strict mode and a state leak between calls. It is
 * now a local, and the built string is returned so callers can use the
 * result. Returning a value is backward-compatible: the original returned
 * undefined, which no caller could meaningfully rely on.
 *
 * @return {string} One line per index, e.g. "aA1\nbB2\ncC3\n".
 */
function loopMultipleArrays() {
  var letters = ["a", "b", "c"];
  var caps = ["A", "B", "C"];
  var nums = [1, 2, 3];
  var output = "";
  for (var i = 0; i < letters.length; i += 1) {
    output += letters[i] + caps[i] + nums[i] + "\n";
  }
  return output;
}
// This script is designed to run on a 1-hour trigger in Google Apps Script. It is also written to "WRITE_TRUNCATE" your table,
// which means it deletes the table and updates it with the newest information. You can change the variables in campaignList
// if you want to adjust it for your needs.
// NOTE(review): fragment — truncated; only the BigQuery target placeholders
// and the start-date computation are visible. The MailChimp API fetch and
// the BigQuery load implied by the name/comment above are not in view.
| function chimpyAPI30days() { | |
// All three assigned without var — implicit globals; replace "xxx" with the
// real project/dataset/table ids.
| projectId = "xxx"; | |
| datasetId = "xxx"; | |
| tableId = 'xxx'; | |
// Misnomer: "yesterday" is actually moved 29 days back (a ~30-day window,
// matching the function name), not one day.
| yesterday = new Date(); | |
| yesterday.setDate(yesterday.getDate() - 29); |
// NOTE(review): fragment — truncated; only the setup is visible. The actual
// YouTube Analytics query and any BigQuery write are not in view.
| function youTubeAnalytics() { | |
// Assigned without var — implicit globals; replace "xxx" placeholders.
| projectId = "xxx"; | |
| datasetId = "xxx"; | |
| tableId = 'xxx'; | |
// Looks up the authenticated user's own channel and takes the first item's
// id; assumes at least one channel exists — TODO confirm, otherwise
// items[0] throws.
| myChannels = YouTube.Channels.list('id', {mine: true}); | |
| channel = myChannels.items[0]; | |
| channelId = channel.id; | |
// End date is two days back (per the inline note, the API's freshest data),
// formatted as a 'yyyy-MM-dd' UTC string.
| yesterday = new Date(); | |
| yesterday.setDate(yesterday.getDate() - 2); // I'm setting it back two days because that's the most recent date for the API. | |
| yesterday = Utilities.formatDate(yesterday, 'UTC', 'yyyy-MM-dd'); |