Code below. To get the game you want, choose the season, the game type (regular season, postseason, etc.), the home team, and the away team; the rest takes care of itself.
Code from @benbbaldwin
library(nflscrapR)
library(tidyverse)
library(na.tools)
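For reference, a minimal sketch of that selection (not from the original gist; it assumes nflscrapR's scrape_game_ids() and scrape_json_play_by_play(), and the 2019 SEA/SF matchup here is just a placeholder):

# look up the game_id for a specific matchup, then pull its play-by-play
game_ids <- scrape_game_ids(2019, type = "reg")
game_id <- game_ids %>%
  filter(home_team == "SEA", away_team == "SF") %>%
  pull(game_id)
pbp <- scrape_json_play_by_play(game_id)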
# Path to your oh-my-zsh configuration.
export ZSH=$HOME/.oh-my-zsh

# Set name of the theme to load.
# Look in ~/.oh-my-zsh/themes/
# Optionally, if you set this to "random", it'll load a random theme each
# time that oh-my-zsh is loaded.
#export ZSH_THEME="robbyrussell"
export ZSH_THEME="zanshin"
##
## Slightly nicer .bashrc
## Makes pretty colors and stuff
##

## Set $PATH, which tells the computer where to search for commands
export PATH="$PATH:/usr/sbin:/sbin:/bin:/usr/bin:/etc:/usr/ucb:/usr/local/bin:/usr/local/local_dfs/bin:/usr/bin/X11:/usr/local/sas"

## Where to search for manual pages
export MANPATH="/usr/share/man:/usr/local/man:/usr/local/local_dfs/man"
library(tidycensus)
library(tidyverse)
library(extrafont)

# 2016 ACS 1-year total population (B01003_001) for places with more than 500,000 residents
city16 <- get_acs(geography = "place",
                  variables = "B01003_001",
                  survey = "acs1",
                  year = 2016) %>%   # assumed vintage, matching the estimate16 name
  filter(estimate > 500000) %>%
  rename(estimate16 = estimate)
library(tidyverse)
library(readxl)      # read_xlsx()
library(hrbrthemes)

# download https://github.com/rfordatascience/tidytuesday/blob/master/data/tidy_tuesday_week2.xlsx
football <- read_xlsx("data/tidy_tuesday_week2.xlsx")

# get the top 16 paid players in each position for each year
to_plot <- football %>%
  mutate(Team = 1:nrow(.)) %>%
  gather(position, salary, -c(year, Team)) %>%
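  # (the snippet stops here; a hedged guess at the next step, matching the
  # comment above: keep the 16 highest salaries within each position and year)
  group_by(position, year) %>%
  top_n(16, salary) %>%
  ungroup()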
---
title: "Decouple Code and Output in xaringan slides"
subtitle: "Demo Slides for <a href='https://garrickadenbuie.com/blog/2018/08/14/decouple-code-and-output-in-xaringan-slides/'>Related Blog Post</a>"
author: "Garrick Aden-Buie"
date: "`r Sys.Date()`"
output:
  xaringan::moon_reader:
    lib_dir: libs
    nature:
      ratio: "16:9"
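In the slide body, one common knitr pattern for this kind of decoupling (a sketch of the general idea, not necessarily the exact approach in the linked post) is to evaluate a chunk with its code hidden, then re-display that chunk's source elsewhere via `ref.label` with `eval = FALSE`:

```{r pressure-plot, echo=FALSE}
plot(pressure)
```

```{r ref.label="pressure-plot", eval=FALSE}
```

The first chunk renders only the plot; the second prints the same code without re-running it, so the output and the source can live on different slides.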
body {
  background-color: #1d1d1d;
  color: #e2e2e2;
  font-family: -apple-system,BlinkMacSystemFont,"Segoe UI",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol";
  line-height: 1.5;
}

a {
  color: #509eeb;
}
# Load the packages we’re going to be using:
# Alongside the usual stuff like tidyverse and magrittr, we’ll be using rvest for some web-scraping, jsonlite to parse some JSON, and extrafont to load some nice custom fonts
needs(tidyverse, magrittr, rvest, jsonlite, extrafont)

# Before we go on, two things to note:

# First, on web scraping:
# You should always check the terms of the site you are extracting data from, to make sure scraping (often referred to as `crawling`) is not prohibited. One way to do this is to visit the website’s `robots.txt` page, and ensure that a) there is nothing explicitly stating that crawlers are not permitted, and b) ideally, the site simply states that all user agents are permitted (indicated by a line saying `User-Agent: *`). Both of those are the case for our use case today (see https://www.ultimatetennisstatistics.com/robots.txt).
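# (Not part of the original walkthrough: a quick way to eyeball that file from R
# before scraping; the robotstxt package also offers a more formal paths_allowed() check.)
readLines("https://www.ultimatetennisstatistics.com/robots.txt")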
# And second, about those custom fonts:
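# (The snippet is cut off here; presumably it goes on to cover registering fonts
# with extrafont. The usual one-time setup looks roughly like this:)
# font_import()          # run once; scans installed system fonts (slow, interactive)
loadfonts(quiet = TRUE)  # register the imported fonts for this session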
library(tweetrmd)
library(htmltools)
library(reactable)

x <- tibble::tibble(
  name = c("Tom", "Julia"),
  handle = c("@thomas_mock", "@b0rk"),
  tweet = c(
    "https://twitter.com/thomas_mock/status/1294670115590545408",
    "https://twitter.com/b0rk/status/1295029728127135746"
  )
)