Skip to content

Instantly share code, notes, and snippets.

View sergiolucero's full-sized avatar
💭
coding the days away

Sergio Lucero sergiolucero

💭
coding the days away
View GitHub Profile
@sergiolucero
sergiolucero / threenody.py
Last active June 18, 2018 00:51
merging three systems
# Merge station data from three Santiago bike-share systems into one view.
import folium
import pandas as pd
import requests
# BiciLasCondes publishes its station list directly as JSON
blc = pd.read_json('https://www.bicilascondes.cl/availability_map/getJsonObject') # BiciLasCondes
# BikeSantiago stations come from the citybik.es v2 network API
js = requests.get('https://api.citybik.es/v2/networks/santiago').json() # BikeSantiago
# normalise column names so the frames share lat/lon columns
bs = pd.DataFrame(js['network']['stations']).rename(columns={'latitude':'lat','longitude':'lon'})
def get_mobike(lat,lon):
{
"nodes": [
{"id": "J_Pizarro", "group": 1},
{"id": "P_Walker", "group": 1},
{"id": "B_Prokurica", "group": 8},
{"id": "J_Ossandón", "group": 8},
{"id": "R_Lagos", "group": 5},
{"id": "J_García", "group": 8},
{"id": "C_Bianchi", "group": 7},
{"id": "A_Zaldívar", "group": 1},
# Plot BiciLasCondes station availability on an interactive folium map.
import folium, pandas as pd
# station availability feed, served as JSON
blc = pd.read_json('https://www.bicilascondes.cl/availability_map/getJsonObject')
# drop the address fields that are not shown on the map
blc = blc.drop(['address','addressNumber','district','zip'], axis=1)
# center the map on the mean station position
m = folium.Map(location=[blc.lat.mean(),blc.lon.mean()], zoom_start=14)
bikes = lambda row: 'red' if row['bikes']==0 else 'purple' if row['bikes']<5 else 'green'
# One marker per station, colored by availability, with the full station
# record rendered as an HTML table in the popup.
# (Loop variable renamed from `id`, which shadowed the builtin.)
for _, station_data in blc.iterrows():
    folium.Marker((station_data['lat'], station_data['lon']),
                  icon=folium.Icon(color=bikes(station_data), icon='bicycle'),
                  popup=station_data.to_frame().to_html(header=False, justify='center')).add_to(m)
m.save('bicis.html')  # write the self-contained interactive map
@sergiolucero
sergiolucero / pointshapefinder.py
Created August 23, 2018 13:35
point in geometry
import geopandas
import numpy as np
import pandas as pd
from shapely.geometry import Point
# Comuna polygons for the Santiago metropolitan region.
# Fix: the file imports `geopandas` without an alias, so the original
# `gp.read_file(...)` raised NameError.
gdf = geopandas.read_file('http://quant.cl/static/DATA/GEO/comunas13.json')
# Fix: `pd.np` was deprecated and removed (pandas 1.0+); use numpy directly.
rand = np.random.randn
# ten random points jittered around central Santiago (lon, lat order)
points = [Point((-70.7 + 0.1*rand(), -33.4 + 0.1*rand())) for _ in range(10)]
for point in points:
for cid,comuna in gdf.iterrows():
@sergiolucero
sergiolucero / airquality.py
Created August 24, 2018 03:19
air quality plots
# for every city, we fetch and plot data from all available parameters
import openaq
import matplotlib.pyplot as plt
import seaborn as sns
# global plot styling: white background, muted palette, larger fonts
sns.set(style="white", palette='muted', font_scale=1.35, color_codes=True)
# OpenAQ REST client used by the fetch helper below
api = openaq.OpenAQ()
def get_city_data(city='Santiago'):
df = api.measurements(city=city, limit=10000, df=True)
df = df.query("value >= 0.0") # clean up the data by removing values below 0
@sergiolucero
sergiolucero / cancionero.py
Last active January 9, 2023 18:58
Cancionero de La Cuerda
import requests, sys
from bs4 import BeautifulSoup
from docx import Document
def ubs(url):
    """Download *url* and return the page parsed with the html5lib backend."""
    return BeautifulSoup(requests.get(url).text, 'html5lib')
def get_chords(artist = 'Manu Chao'):
fartist = '_'.join(s.lower() for s in artist.split()) # use map
url = f'https://acordes.lacuerda.net/{fartist}/'
@sergiolucero
sergiolucero / artista_la_cuerda.py
Last active August 31, 2018 17:34
canciones de un artista en LaCuerda.net
import requests
from bs4 import BeautifulSoup
from operator import methodcaller
def url_bs(url):  # magic!
    """Fetch *url* and parse its HTML with BeautifulSoup (html5lib parser)."""
    return BeautifulSoup(requests.get(url).text, 'html5lib')
def recopila_acordes(artista = 'Manu Chao'): # "Manu Chao" -> manu_chao
fartist = '_'.join(map(methodcaller("lower"),artista.split()))
url = f'https://acordes.lacuerda.net/{fartist}/'
from docx import Document
def compilar_cancionero(artista):
data_canciones = recopila_acordes(artista)
document = Document()
document.add_heading(artista, 0)
for songname, song_chords in data_canciones.items():
@sergiolucero
sergiolucero / transantiago.py
Created September 26, 2018 01:59
recorridos transantiago 2018
import pandas as pd
import folium, requests, time
# base map centered on Santiago
fm = folium.Map([-33.4, -70.65], zoom_start=12)
#LINEAS = ['101','206', '345']
# SECURITY fix: the original called eval() on the raw HTTP response body,
# which executes arbitrary code returned by the network. The endpoint
# serves JSON, so parse it safely with Response.json() instead.
recorridos = requests.get('http://www.transantiago.cl/restservice/rest/getservicios/all').json()
LINEAS = recorridos #[:10]
@sergiolucero
sergiolucero / down_cntv.py
Created September 26, 2018 02:17
denuncias CNTV
import requests
import pandas as pd
from bs4 import BeautifulSoup
# IDEAS: multas, elencos: http://es.teleserieschile.wikia.com/wiki/Categor%C3%ADa:Teleseries_de_Canal_13
# Fix: 'html5' is not a registered bs4 parser feature name and raises
# bs4.FeatureNotFound; the correct name is 'html5lib' (as used in the
# other gists in this file).
bs = BeautifulSoup(requests.get('https://www.cntv.cl/cntv/site/tax/port/all/taxport_16___1.html').text, 'html5lib')
links = bs.find_all('a')
# keep only the anchors pointing at the "Lo más ..." complaint listings
denuncias = [l for l in links if 'Lo más' in l.text]
droot = 'https://www.cntv.cl'  # site root, for resolving relative links
df = pd.DataFrame()  # accumulator for the scraped complaint rows