Good idea or bad idea for YAML metadata?

---
title: "My report"
author: "Tristan Mahr"
output: html_document
params:
  knitr_chunks: !r knitr::opts_chunk$set(comment = "#>")
---
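For comparison, the conventional place for these options is a setup chunk at the top of the document rather than the YAML params; a minimal sketch of that alternative:

# Usual alternative: set the knitr chunk options in a setup chunk
# instead of evaluating them inside the YAML header
knitr::opts_chunk$set(comment = "#>")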
library("devtools") | |
install_github("sckott/cowsay") | |
library("cowsay") | |
say("HAPPY BIRTHDAY", "cat") | |
## ----- | |
## HAPPY BIRTHDAY | |
## ------ | |
## \ | |
## \ |
sprintf("%03.f", seq_len(20)) | |
# [1] "001" "002" "003" "004" "005" "006" "007" "008" "009" "010" | |
#[11] "011" "012" "013" "014" "015" "016" "017" "018" "019" "020" |
# Code for formatting numbers (for my knitr docs)
library("magrittr")
library("dplyr", warn.conflicts = FALSE)
library("broom")
library("stringr")

# Fixed width integers (like for track or image numbers in filenames)
sprintf("%03.f", seq_len(20))
#  [1] "001" "002" "003" "004" "005" "006" "007" "008" "009" "010"
library("dplyr") | |
library("readr") | |
# Download a tbl from a db connection and write to a csv | |
backup_tbl <- function(tbl_name, src, output_dir) { | |
# Try to download the tbl, defaulting to an empty data-frame | |
try_tbl <- failwith(data_frame(), tbl) | |
df <- collect(try_tbl(src, tbl_name)) | |
output_file <- file.path(output_dir, paste0(tbl_name, ".csv")) |
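A hypothetical call, to show how the pieces fit together; the database file, table name, and output folder below are placeholders, not from the original code:

# Placeholder connection and names, for illustration only
db <- src_sqlite("study.sqlite3", create = TRUE)
backup_tbl("visits", db, output_dir = ".")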
library("magrittr") | |
library("stringr") | |
library("lubridate") | |
library("xml2") | |
get_date <- function(xml_blob) { | |
# Convert XML clock info into an R list | |
xml_list <- xml_blob %>% | |
str_replace_all("\\\\n", "") %>% | |
read_xml %>% |
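The preview stops mid-pipe; to show just the conversion step the comment describes, here is a toy example. The <clock> structure and the date value are invented for illustration:

# Toy XML blob -> R list (the real clock XML's structure is assumed)
"<clock><date>2015-10-21</date></clock>" %>%
  read_xml() %>%
  as_list()
# The nested date string could then be parsed with lubridate::ymd()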
# Download all the 'See also: Artist: "Title"' entries from
# Pitchfork's Top 200 songs of the 80's
# http://pitchfork.com/features/staff-lists/9700-the-200-best-songs-of-the-1980s/
library("rvest")
library("stringr")
library("dplyr", warn.conflicts = FALSE)
library("curl")

# Given url, get see-also paragraphs and text matching "See also*"
scrape_see_also_nodes <- function(url) {
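The body of that function is cut off above; as a rough sketch of one way to do what its comment describes (the "p" node selector and the helper name here are my guesses, not the original implementation):

scrape_see_also_sketch <- function(url) {
  # Read the page, pull paragraph text, keep lines starting with "See also"
  url %>%
    read_html() %>%
    html_nodes("p") %>%
    html_text() %>%
    str_subset("^See also")
}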
# Function factory
make_adder <- function(x) {
  function() x + 1
}

# Create list of functions with Map
funs <- Map(make_adder, 1:10)
unlist(Map(function(f) f(), funs))
# [1] 2 3 4 5 6 7 8 9 10 11
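A small habit worth adding to factories like this is forcing the argument, so each closure captures its value immediately instead of as an unevaluated promise; the force() line is my addition, not part of the original snippet:

make_adder <- function(x) {
  # Evaluate x right away so the returned closure doesn't rely on lazy evaluation
  force(x)
  function() x + 1
}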
get_p_stars <- function(ps) {
  symnum(ps, na = FALSE, cutpoints = c(0, 0.001, 0.01, 0.05, 0.1, 1),
         symbols = c("***", "**", "*", ".", " "), legend = FALSE)
}
#> get_p_stars(c(.00001, .001, .002, .01, .02, .05, .06, .1, .2))
#> [1] *** *** ** ** * * . .
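A hedged example of where this comes in handy: annotating a tidied model table with stars. The model below is made up for illustration, and the stars are converted to plain character so they drop into a column cleanly:

library("broom")
library("dplyr", warn.conflicts = FALSE)

# Example model, then a stars column next to each p-value
fit <- lm(mpg ~ wt + hp, data = mtcars)
tidy(fit) %>%
  mutate(stars = as.character(get_p_stars(p.value)))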
library("readr") | |
library("dplyr") | |
library("tidyr") | |
library("lubridate") | |
# Load the visit history, parse the timestamp as a Date | |
mdr <- read_csv("C:/Users/trist/Downloads/longitudinal_missing.csv") %>% | |
mutate(Date = mdy_hm(DateTime) %>% as.Date) | |
# for each study/subject, find difference between latest and earliest date |
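A sketch of the grouped summary that last comment describes; the Study and Subject column names are guesses at the csv's layout, not taken from the original file:

# Span between first and last visit per study/subject (column names assumed)
mdr %>%
  group_by(Study, Subject) %>%
  summarise(Span = max(Date) - min(Date))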