| title | output |
|---|---|
Using Spark, Delta Lake and MLflow |
html_notebook |
First we define helper functions,
delta_version <- function(sc, path) {
invoke_static(sc, "io.delta.tables.DeltaTable", "forPath", spark_session(sc), path) %>%
| import SimpleHTTPServer | |
| import BaseHTTPServer | |
| def main(): | |
| request_handler = SimpleHTTPServer.SimpleHTTPRequestHandler | |
| request_handler.server_version = "Jetty(8.y.z-SNAPSHOT)" | |
| request_handler.sys_version = "" | |
| BaseHTTPServer.test(HandlerClass = request_handler, ServerClass = BaseHTTPServer.HTTPServer) | |
| if __name__ == "__main__": |
| #ifdef _MSC_VER | |
| #pragma warning( disable : 4146 ) | |
| #endif | |
| #include <iostream> | |
| #include <torch/torch.h> | |
| struct Net : torch::nn::Module { | |
| Net(int64_t N, int64_t M) { | |
| W = register_parameter("W", torch::randn({ N, M })); |
| title | output |
|---|---|
Using Spark, Delta Lake and MLflow |
html_notebook |
First we define helper functions,
delta_version <- function(sc, path) {
invoke_static(sc, "io.delta.tables.DeltaTable", "forPath", spark_session(sc), path) %>%
Download and install rtools40-x86_64.exe from https://cran.r-project.org/bin/windows/testing/rtools40.html, then launch RTools MinGW 64-bit
pacman -S mingw-w64-{i686,x86_64}-toolchain
pacman -S mingw-w64-{i686,x86_64}-cmake
git checkout v1.4.0a0
This script makes use of an Amazon EMR cluster with one master and three nodes:
install.packages("sparklyr")
install.packages("keras")
sc <- spark_connect(
master = "yarn",
spark_home = "/usr/lib/spark/",
  config = list("sparklyr.apply.env.WORKON_HOME" = "/tmp/.virtualenvs"))

This script requires an Amazon EMR cluster with one master and three nodes:
library(sparklyr)
sc <- spark_connect(master = "yarn", spark_home = "/usr/lib/spark/", config = list(
spark.dynamicAllocation.enabled = FALSE,
`sparklyr.shell.executor-cores` = 8,
`sparklyr.shell.num-executors` = 3,
  sparklyr.apply.env.WORKON_HOME = "/tmp/.virtualenvs"))

| library(glmnet) | |
| wine_file <- pins::pin("https://raw.githubusercontent.com/rstudio/mlflow-example/master/wine-quality.csv") | |
| train <- read.csv(wine_file) | |
| train_x <- as.matrix(train[, !(names(train) == "quality")]) | |
| train_y <- train[, "quality"] | |
| alpha <- mlflow_log_param("alpha", 0.7, "numeric") |
| library(tidyverse) | |
| rtweet::get_timeline(rtweet::as_userid("karpathy"), n = 1000) %>% | |
| mutate(year = as.character(lubridate::year(as.Date(created_at))), | |
| month = lubridate::month(as.Date(created_at))) %>% | |
| group_by(year, month) %>% | |
| count(year, month) %>% | |
| ggplot(aes(month, n, color = year)) + geom_point() + | |
| geom_smooth(method = "lm", alpha = .15, aes(fill = year)) + | |
| ggtitle("Karpathy's Tweets per Month") |
| --- | |
| title: "Using GPUs with TensorFlow and Docker" | |
| subtitle: Multiverse Video Series | |
| author: Javier Luraschi | |
| output: | |
| revealjs::revealjs_presentation: | |
| df_print: paged | |
| self_contained: true | |
| theme: moon | |
| --- |
# Scene setup for a rayrender render: a regular grid of sphere positions
# plus a Perlin noise field. The code that consumes `spheres` and `noise`
# continues beyond this excerpt — presumably displacement/placement of the
# spheres; confirm against the full script.
library(tidyverse)
library(rayrender)
# Fix the RNG so the generated noise (and any later sampling) is reproducible.
set.seed(2019)
# All (y, z) combinations of 50 values each: 2,500 grid points covering
# -4.8 to 5.0 in steps of 0.2 on both axes.
spheres <- expand.grid(y = (1:50) / 5 - 5, z = (1:50) / 5 - 5) %>%
tibble::as_tibble()
# 100 x 100 x 1 Perlin noise field from the ambient package (default
# frequency/octaves).
noise <- ambient::noise_perlin(c(100, 100, 1))