# Import the required modules
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import time
import json

def get_perf_log_on_load(url, headless=True, filter=None):
    # Main function: enable performance logging of Chrome, load the URL,
    # and return the captured performance log entries.
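    # --- Sketch: the gist preview cuts off before the body, so the lines below
    # --- are a hedged reconstruction (assuming Selenium 3-style
    # --- desired_capabilities), not the author's original code.
    caps = DesiredCapabilities.CHROME.copy()
    caps["goog:loggingPrefs"] = {"performance": "ALL"}
    options = webdriver.ChromeOptions()
    if headless:
        options.add_argument("--headless")
    driver = webdriver.Chrome(desired_capabilities=caps, options=options)
    driver.get(url)
    time.sleep(2)  # give the page a moment to finish loading
    entries = [json.loads(e["message"]) for e in driver.get_log("performance")]
    driver.quit()
    if filter is not None:
        # keep only the DevTools methods listed in `filter`
        entries = [e for e in entries if e["message"]["method"] in filter]
    return entries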
library(tidyverse)
library(rvest)
library(ggpubr)
library(COVID19)
## 1st step:
# go to http://www.sharkattackdata.com/place/united_states_of_america
## 2nd step:
# save HTML file to same location as script
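## 3rd step (sketch): read the saved HTML back in with rvest and pull out the
## attack table; the file name and the use of the first table on the page are
## assumptions, not part of the original gist
library(rvest)
shark_page <- read_html("united_states_of_america.html")
shark_raw  <- html_table(shark_page)[[1]]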
library(tidyverse)
## this r code is taken from here: https://github.com/mkearney/pkguse
read_r_files <- function(...) {
  dirs <- unlist(list(...))
  r <- unlist(lapply(dirs, list.files,
                     pattern = "\\.(R|Rmd|Rmarkdown|rmd|r|Rhistory)$",
                     recursive = TRUE,
                     full.names = TRUE,
                     all.files = TRUE))
  ## (sketch) the preview ends above; presumably the matched files are read in
  unlist(lapply(r, readLines, warn = FALSE))
}
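## (sketch) what such a helper is typically used for: tabulating package usage
## across the collected files; the regex and the pkg_counts name are my own
## illustration, relying on the tidyverse pipe loaded above
pkg_counts <- read_r_files(".") %>%
  stringr::str_extract_all("(?<=library\\()[A-Za-z0-9._]+") %>%
  unlist() %>%
  table() %>%
  sort(decreasing = TRUE)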
extract_dat.R: Latent GOLD exports data in an awkward way, keeping commas as decimal separators inside a comma-separated file, which breaks parsing. This script recovers the data by extracting it row by row.
#===============================================================================
# 2022-07-12
# Extract data from Latent GOLD within R
# Fabio Votta (@favstats)
#===============================================================================
library(tidyverse)
library(data.table)
setwd(here::here())
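## (sketch) one way to do the row-by-row recovery described above; the file
## name, the header-based column count, and the digit-merging heuristic are
## all assumptions for illustration, not the original extraction logic
raw_lines <- readLines("latent_gold_export.csv")
header    <- stringr::str_split(raw_lines[1], ",")[[1]]

fix_row <- function(line, n_cols) {
  parts <- stringr::str_split(line, ",")[[1]]
  ## while the row has more pieces than the header, treat a purely numeric
  ## piece as the decimal part of the preceding field and merge the two
  while (length(parts) > n_cols) {
    i <- which(stringr::str_detect(parts, "^[0-9]+$"))[1]
    if (is.na(i) || i == 1) break
    parts[i - 1] <- paste0(parts[i - 1], ".", parts[i])
    parts <- parts[-i]
  }
  parts
}

fixed <- purrr::map(raw_lines[-1], fix_row, n_cols = length(header))
dat   <- do.call(rbind, fixed) %>%
  as.data.frame(stringsAsFactors = FALSE) %>%
  setNames(header) %>%
  dplyr::mutate(dplyr::across(dplyr::everything(), readr::parse_guess))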
#===============================================================================
# 2022-07-10
# Year progress twitter
# Fabio Votta (@favstats)
# Idea blatantly stolen from Ilya Kashnitsky (@ikashnitsky)
#===============================================================================
library(tidyverse)
library(magrittr)
library(lubridate)
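## (sketch) the core calculation a year-progress bot needs: the share of the
## current year that has elapsed, rendered as a text bar; the bar helper and
## the 20-block width are my own illustration, not the original script
today_prop <- yday(today()) / yday(ceiling_date(today(), "year") - days(1))
n_blocks   <- 20
filled     <- round(today_prop * n_blocks)
progress_bar <- paste0(strrep("\u2593", filled),
                       strrep("\u2591", n_blocks - filled),
                       " ", scales::percent(today_prop, accuracy = 1))
progress_bar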
save_csv <- function(d, path) {
  if (file.exists(path)) {
    readr::write_csv(d, append = TRUE, file = path)
  } else {
    dirs_to_create <- stringr::str_split(path, "\\/") %>%
      unlist() %>%
      purrr::discard(~stringr::str_detect(.x, "\\.")) %>%
      paste0(collapse = "/")
    ## (sketch) the preview ends above; presumably the directories are created
    ## before the file is written for the first time
    dir.create(dirs_to_create, recursive = TRUE, showWarnings = FALSE)
    readr::write_csv(d, file = path)
  }
}
library(tidyverse)
library(httr)
token <- Sys.getenv("fb_marketing_token")
# link to fb api
my_link <- "https://graph.facebook.com"
# define fields you are interested in
# (sketch: the preview stops after "ad_creation_time"; the rest are commonly
# requested Ad Library fields, not necessarily the original list)
search_fields <- c("ad_creation_time", "ad_delivery_start_time",
                   "page_id", "page_name", "spend", "impressions")
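## (sketch) querying the Ad Library endpoint with httr; the version, endpoint
## name, and parameters follow the public ads_archive documentation, but the
## exact call is an assumption, not the original script
resp <- httr::GET(
  paste0(my_link, "/v14.0/ads_archive"),
  query = list(
    access_token         = token,
    search_terms         = "climate",   # assumed example search term
    ad_reached_countries = "US",
    fields               = paste(search_fields, collapse = ","),
    limit                = 100
  )
)
ads <- httr::content(resp, as = "parsed")$data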
library(tidyverse)
library(modelbased)
library(magrittr)

overview <- readRDS("data/overview.rds")

## helper function: turn p-values into significance labels
get_plabs <- function(pval) {
  dplyr::case_when(is.na(pval) ~ "",
                   pval < 0.001 ~ "***",
                   pval < 0.01 ~ "**",
                   pval < 0.05 ~ "*",
                   TRUE ~ "")  # (sketch) closing case reconstructed; the preview was cut off
}
## Create NYT Spiral Animations
## Most of the code comes from here: https://bydata.github.io/nyt-corona-spiral-chart/
library(tidyverse)
library(lubridate)
library(gganimate)
library(viridis)
owid_url <- "https://github.com/owid/covid-19-data/blob/master/public/data/owid-covid-data.csv?raw=true"
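## (sketch) read the OWID data and derive the variables a spiral chart needs;
## the country filter and variable names are my own illustration, not the
## walkthrough linked above
covid <- readr::read_csv(owid_url) %>%
  dplyr::filter(location == "United States") %>%
  dplyr::transmute(date,
                   cases = new_cases_smoothed,
                   day_of_year = lubridate::yday(date),
                   year = lubridate::year(date))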
### Code mostly inspired by https://github.com/HudsonJamie/tidy_tuesday/blob/main/2021/week_46/afrilearndata.R
library(pdftools)
library(tidygeocoder)
library(tidyverse)
library(janitor)
library(rgdal)
library(osmdata)
library(sf)
library(ggfx)