Built with blockbuilder.org
forked from anonymous's block: fresh block
```r
# Compile t.cpp, which is expected to define psort() and csort()
# (not shown here), and benchmark them against base R sort().
Rcpp::sourceCpp("t.cpp")

x <- runif(1000000)

m <- microbenchmark::microbenchmark(
  psort(x),
  csort(x),
  sort(x)
)

# A reverse-sorted vector, useful as a second benchmarking input.
x <- 1000000:1
```
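Assuming `t.cpp` really does provide `psort()` and `csort()` (it is not shown above), a quick way to inspect the results is to print the `microbenchmark` object and repeat the comparison on the reverse-sorted vector; this is an illustrative sketch, not part of the original benchmark:

```r
# Summarise the timings from the first run.
print(m)

# Repeat the comparison on the reverse-sorted vector defined above;
# some sorting algorithms behave very differently on this kind of input.
m_rev <- microbenchmark::microbenchmark(
  psort(x),
  csort(x),
  sort(x)
)
print(m_rev)
```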
```r
# Build a DenseNet classifier for 28x28 single-channel images with 10 classes.
library(keras)
library(densenet)

input_img <- layer_input(shape = c(28, 28, 1))
model <- application_densenet(input_tensor = input_img, classes = 10L)

model %>% compile(
  optimizer = "adam",
  loss = "categorical_crossentropy",
  metrics = "accuracy"
)
```
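Given the 28×28×1 input and 10 classes, the model lines up with MNIST-shaped data. A hedged sketch of training it; the dataset choice, preprocessing, and epoch count are assumptions, not part of the snippet above:

```r
# Load MNIST and reshape to the (28, 28, 1) input the model expects.
mnist <- dataset_mnist()
x_train <- array_reshape(mnist$train$x / 255, c(nrow(mnist$train$x), 28, 28, 1))
y_train <- to_categorical(mnist$train$y, num_classes = 10)

model %>% fit(
  x_train, y_train,
  batch_size = 64,
  epochs = 5,
  validation_split = 0.1
)
```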
```r
# Download the Amazon Fine Foods reviews and keep only the review text,
# dropping the "review/text: " prefix from each line.
download.file("https://snap.stanford.edu/data/finefoods.txt.gz", "finefoods.txt.gz")

library(readr)
library(stringr)
library(purrr)

reviews <- read_lines("finefoods.txt.gz")
reviews <- reviews[str_sub(reviews, 1, 12) == "review/text:"]
reviews <- str_sub(reviews, start = 14)
```
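A natural next step for this corpus is to turn the review text into integer sequences. A minimal sketch using keras' tokenizer; the vocabulary size here is an arbitrary choice for illustration:

```r
library(keras)

# Fit a tokenizer on the review text and convert it to integer sequences.
tokenizer <- text_tokenizer(num_words = 20000) %>%
  fit_text_tokenizer(reviews)

sequences <- texts_to_sequences(tokenizer, reviews)
```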
```r
# Hyperparameters exposed as run flags so they can be overridden per training run.
library(readr)
library(keras)
library(purrr)

FLAGS <- flags(
  flag_integer("vocab_size", 50000),
  flag_integer("max_len_padding", 20),
  flag_integer("embedding_size", 256),
  flag_numeric("regularization", 0.0001),
  flag_integer("seq_embedding_size", 512)
)
```
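These flags would typically feed a tokenizer and a padding step. A hedged sketch; the `texts` vector below is a placeholder for whatever question text the script actually loads, which is not shown above:

```r
# Placeholder question text; the real script would read it from disk.
texts <- c("How do I learn R?", "What is the best way to learn R?")

tokenizer <- text_tokenizer(num_words = FLAGS$vocab_size)
fit_text_tokenizer(tokenizer, texts)

padded <- texts_to_sequences(tokenizer, texts) %>%
  pad_sequences(maxlen = FLAGS$max_len_padding)
```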
---
title: "Quora Question Pairs"
output:
  flexdashboard::flex_dashboard:
    orientation: columns
runtime: shiny
---

```{r global, include=FALSE}
library(keras)
```
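The global chunk would typically go on to load whatever trained model and tokenizer the dashboard serves. A hedged sketch; the file names are illustrative placeholders, not taken from the snippet above:

```r
# Load the saved model and tokenizer produced by the training script;
# both file names are placeholders.
model <- load_model_hdf5("model-question-pairs.hdf5", compile = FALSE)
tokenizer <- load_text_tokenizer("tokenizer-question-pairs")
```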
```r
# Query the Todoist activity log one week per call, using the API token from config.
todoist_token <- config::get("TODOIST_TOKEN")

get_tasks_week <- function(week = 0, offset = 0) {
  res <- httr::POST(
    url = "https://api.todoist.com/sync/v8/activity/get",
    body = list(
      token = todoist_token,
      limit = 100,
      page = week,
      offset = offset  # assumed: passes the offset argument through to the API
    )
  )
  httr::content(res)  # parse the JSON response
}
```
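A hedged usage sketch; it only calls the function above and inspects the parsed responses, without assuming anything about the fields Todoist returns:

```r
# Fetch the two most recent weeks of activity.
recent <- purrr::map(0:1, ~ get_tasks_week(week = .x))
str(recent, max.level = 1)
```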
```r
library(torch)
library(ggplot2)

# We want to find the minimum of this function using gradient descent.
f <- function(x) {
  x^2 - x
}
```
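A minimal sketch of the descent itself, using a torch tensor with autograd; the starting point, learning rate, and step count are arbitrary choices (the analytic minimum of x^2 - x is at x = 0.5):

```r
# Start from x = 2 and take 50 gradient steps with learning rate 0.1.
x <- torch_tensor(2, requires_grad = TRUE)
lr <- 0.1

for (i in 1:50) {
  y <- f(x)
  y$backward()           # compute df/dx at the current x
  with_no_grad({
    x$sub_(lr * x$grad)  # gradient step
    x$grad$zero_()       # reset the accumulated gradient
  })
}

x  # should be close to 0.5, the minimizer of x^2 - x
```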
```r
library(keras)
library(tensorflow)

# Sequences of 365 time steps with 10 features go through an LSTM;
# the shared representation then feeds one (or more) output heads.
input <- layer_input(shape = list(365, 10))

representation <- input %>%
  layer_lstm(units = 32, input_shape = list(365, 10)) %>%
  layer_dropout(rate = 0.2)

output1 <- representation %>%
  layer_dense(units = 2, name = "out1")
```
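To turn this into a multi-output model, further heads are added on the same representation and the model is compiled with one loss per output. A hedged sketch; the second head, the loss choices, and the loss weights are illustrative assumptions:

```r
# A hypothetical second head sharing the same representation.
output2 <- representation %>%
  layer_dense(units = 1, name = "out2")

model <- keras_model(inputs = input, outputs = list(output1, output2))

model %>% compile(
  optimizer = "adam",
  loss = list(out1 = "categorical_crossentropy", out2 = "mse"),
  loss_weights = list(out1 = 1, out2 = 0.5)
)
```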