Skip to content

Instantly share code, notes, and snippets.

View alfcrisci's full-sized avatar

Alfonso alfcrisci

View GitHub Profile
#!/usr/bin/python
# Setup fragment for accessing Twitter through the TwitterAPI package.
# The code that actually uses these credentials is not shown in this excerpt.
import io, json
from TwitterAPI import TwitterAPI
# OAuth 1.0a application credentials, intentionally left blank here —
# fill in your own keys before running (presumably passed to TwitterAPI(...);
# verify against the rest of the original script).
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_TOKEN_KEY = ''
ACCESS_TOKEN_SECRET = ''
# Provision build dependencies on a Debian/Ubuntu host.
# The -y flag makes apt-get non-interactive (assumes it is run with sudo rights).
# Install subversion
sudo apt-get -y install subversion
# Install g++
sudo apt-get -y install g++
# Install Hierarchical Data Format library
# NOTE: This library is not necessarily needed, but was required
# in order for this to compile against a clean Ubuntu 12.04 LTS system.
# I didn't need it on a clean EC2 Ubuntu 12.10 instance, so
# (comment truncated in this excerpt — the corresponding install command is not visible here)
# Fetch the latest Tuscany biometeorological GeoJSON feed and render it
# as an interactive Leaflet map over a Stamen Toner basemap with rleafmap.
library(geojsonio)
library(rleafmap)

# Remote GeoJSON source (IBIMET CNR server; availability depends on the host).
url <- "http://149.139.8.55/dati_toscana/geojson/biometeo_last.geojson"
out <- geojson_read(url)

# Basemap tiles plus a point layer; popups show the feature's "Nome" field
# (assumes the GeoJSON properties include Nome — confirm against the feed).
stamen.bm <- basemap("stamen.toner")
velov.sta <- spLayer(out, stroke = FALSE, popup = out$Nome)

# Write the HTML map, centred on Tuscany (lat 43.70, lon 11.00) at zoom 8.
writeMap(stamen.bm, velov.sta,
         width = 800, height = 600,
         setView = c(43.70, 11.00), setZoom = 8)
@alfcrisci
alfcrisci / geo_nominatim.r
Created April 19, 2015 15:48
geolocating with nominatim
geo_nominatim <- function(location) {
require(curl)
require(stringr)
require(jsonlite)
# Forming url
location <- str_replace_all(location, ' ', '+')
location <- URLencode(location)
@alfcrisci
alfcrisci / scrapeItalianCNR.r
Created April 19, 2015 21:08
Rvest scraping CNR ita
library(rvest)
library(stringr)
library(rleafmap)
# html() is for download of content
# html_node() is for selecting node(s) from the downloaded content of a page
# html_text() is for extracting text from a previously selected node
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
wiki_interlingual=function(page,languagestr="cs|de|es|fr|en|pl|pt|ja|ko|zh|nl|ru|cu") {
options(encoding = "UTF-8")
require(XML)
strURL=paste0("http://www.wikidata.org/w/api.php?action=wbgetentities&sites=itwiki&titles=",page,"&languages=",languagestr,"&props=labels&format=xml")
data <- xmlParse(strURL)
data <- xmlToList(data)
if (data$.attrs == 1)
{data <- do.call("rbind",data$entities$entity$labels)
row.names(data)<-NULL
getOpenDapURLAsSpatialGrid = function(opendapURL,variableName,bboxInDegrees)
{
require("sp")
require("ncdf")
print(paste("Loading opendapURL",opendapURL));
# Open the dataset
# Download the full publication list for a Google Scholar profile.
# get_publications() pages its results via the cstart offset, so fetch
# successive pages and bind them into a single data frame instead of
# copy-pasting one call per page (the original also shadowed base::c()
# with a variable named `c`).
library(scholar)

scholar_id <- "D6YYpg0AAAAJ"

# Offsets 0, 20, 40, 60, 80 reproduce the original five page requests.
pages <- lapply(seq(0, 80, by = 20), function(offset) {
  get_publications(scholar_id, cstart = offset)
})

pub_crisci <- do.call(rbind, pages)
write.csv(pub_crisci, "pub_crisci.csv")
@alfcrisci
alfcrisci / R_comuni_ISPRA_data.r
Last active August 29, 2015 14:20
R code to assure reproducibility CNR web location data repo http://149.139.8.55/data/comuni_ispra_suolo/ . See ISPRA licence for data reuse.
###########################################################################
# Cutting and reprojecting into WGS84 geographic coordinates the ISPRA soil-sealing data for each Italian administrative unit, following
# OpenstreetMap Extracts bounds https://github.com/osmItalia/estratti-locali-openstreetmap.
# IBIMET CNR for @CNRconsumosuolo twitter
# Vivaioricerca
##################################################################################################
if (!require("RColorBrewer")) {
install.packages("RColorBrewer")