I hereby claim:
- I am fubits1 on github.
- I am fubits (https://keybase.io/fubits) on keybase.
- I have a public key ASCBPqZQmVjZKFDJNPD92EEVEXLzZow-57iZx-4msM15dgo
To claim this, I am signing this object:
from scipy import spatial

def measure_semantic_shift_by_neighborhood(model1, model2, word, k=25, verbose=False):
    """
    Basic implementation of William Hamilton (@williamleif) et al.'s measure of semantic change
    proposed in their paper "Cultural Shift or Linguistic Drift?" (https://arxiv.org/abs/1606.02821),
    which they call the "local neighborhood measure." They find this measure better suited to
    understanding the semantic change of nouns owing to "cultural shift," or changes in meaning
    "local" to that word, rather than global changes in language use ("linguistic drift"), which
    are better captured by a Procrustes-alignment method (also described in the same paper).
    Arguments are: two gensim word2vec models, the word to measure, and the neighborhood size k.
    """
    # the gist preview is cut off here; a minimal completion of the measure:
    # take the union of the word's k nearest neighbors in both models
    neighborhood = [w for w, _ in model1.wv.most_similar(word, topn=k)]
    neighborhood += [w for w, _ in model2.wv.most_similar(word, topn=k)]
    neighborhood = [w for w in set(neighborhood) if w in model1.wv and w in model2.wv]
    # second-order similarity vectors; their cosine distance is the local shift
    vec1 = [model1.wv.similarity(word, w) for w in neighborhood]
    vec2 = [model2.wv.similarity(word, w) for w in neighborhood]
    return spatial.distance.cosine(vec1, vec2)
| {"status":"success","data":{"resultType":"matrix","result":[{"metric":{},"values":[[1528041600,"102.43391526291488"],[1528045200,"65.27196652719665"],[1528048800,"124.51882845188284"],[1528052400,"364.87669611378044"],[1528056000,"183.96696534350002"],[1528059600,"961.6033647520676"],[1528063200,"1879.493872956394"],[1528066800,"2909.280958465486"],[1528070400,"2762.5223109500967"],[1528074000,"2759.345753378236"],[1528077600,"2921.181614271897"],[1528081200,"4400.141282278818"],[1528084800,"3470.964912153553"],[1528088400,"2854.2055329831237"],[1528092000,"3478.3747877579676"],[1528095600,"4375.370035672176"],[1528099200,"5467.983879136413"],[1528102800,"3437.2946121956893"],[1528106400,"1306.4284762774053"],[1528110000,"7507.044709329386"],[1528113600,"2355.919232170903"],[1528117200,"893.1636953489782"],[1528120800,"846.2098475015198"],[1528124400,"4232.320772507555"],[1528128000,"7506.763612642862"],[1528131600,"8659.400483909612"],[1528135200,"6146.439094062068"],[1528138800,"5709.934255702872"],[1528142 |
| library("roomba") | |
| json_jsonlite <- jsonlite::fromJSON("data.json", | |
| simplifyVector = FALSE, | |
| simplifyDataFrame = FALSE, | |
| simplifyMatrix = FALSE) | |
| json_roomba <- roomba(json_jsonlite$data, cols = "values") | |
| library(tidyverse) | |
| json_roomba %>% str_split(",") | |
| #jsonlite atomic level |
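# An alternative sketch without roomba (assumed here, not part of the
# original gist): flatten the Prometheus-style [timestamp, value] pairs
# from data.json into a tidy tibble with purrr
json <- jsonlite::fromJSON("data.json", simplifyVector = FALSE)
json$data$result[[1]]$values %>%
  map_dfr(~ tibble(
    timestamp = as.POSIXct(.x[[1]], origin = "1970-01-01", tz = "UTC"),
    value     = as.numeric(.x[[2]])
  ))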
# gridSVG::grid.garnish can be used to add SVG attributes to elements of a plot!
# we don't even need to do it one at a time: with the option `group = FALSE`, we
# can give it a vector of values for an attribute and garnish an entire geom at
# once. the only rub is that sometimes ggplot2 shuffles your data around when
# plotting it. we need to check how the data was reordered, and arrange our
# original data to match, before we send it to grid.garnish.
library(tidyverse)
library(magrittr)
library(grid)
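# a minimal sketch of the technique described above (the gist preview ends
# at the library calls; names below are illustrative, not the original code)
library(gridSVG)

p <- ggplot(mtcars, aes(x = mpg, y = hp)) + geom_point()
print(p)       # draw the plot so its grobs are on grid's display list
grid.force()   # make ggplot2's grobs addressable by name

# check how ggplot2 (re)ordered the data while building the plot, and build
# the attribute vector in that drawn order
drawn <- ggplot_build(p)$data[[1]]
tooltips <- paste0(drawn$x, " mpg / ", drawn$y, " hp")

# with group = FALSE, grid.garnish() applies one value per element of the
# grob; grep = TRUE matches the auto-suffixed name "geom_point.points.###"
grid.garnish("geom_point.points", title = tooltips,
             group = FALSE, grep = TRUE)
grid.export("mtcars_tooltips.svg")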
---
title: "R Notebook"
output: html_document
---

```{r echo=FALSE, warning=FALSE, message=FALSE}
library(tidyverse)
library(leaflet)
library(mapview)
library(sf)
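# the notebook is cut off after this setup chunk; a minimal sketch of a
# typical next step (assumed, not from the original gist): render an sf
# object with mapview
nc <- st_read(system.file("shape/nc.shp", package = "sf"), quiet = TRUE)
mapview(nc, zcol = "AREA")
```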
library(tidyverse)
annotation <- "HP"
# identify max char length of y-label
ymax_chr <- nchar(as.integer(max(mtcars$hp)))
ggplot(mtcars, aes(x = mpg, y = hp)) +
  geom_point(color = "black", alpha = 0.5) +
  scale_y_continuous(
    # hedged completion (the gist preview ends here): swap the topmost
    # break label for the annotation, padded to equal width so the panel
    # does not shift
    labels = function(breaks) {
      labs <- str_pad(breaks, width = ymax_chr)
      labs[length(labs)] <- str_pad(annotation, width = ymax_chr)
      labs
    }
  )
#!/bin/bash
## libraries: sed, exiftool
# sudo apt-get install sed libimage-exiftool-perl
## if an output folder is needed:
# [ -d output-folder ] || mkdir output-folder # create the output folder if it doesn't exist
i=0 # track the iteration cycle, for row lookup in the text file with sed
## for each JPG
for filename in source-folder/*.jpg; do
  ((i++))
  ## look up row i in the text file (hedged completion from here on; the
  ## gist preview is cut off -- "captions.txt" and the tag are illustrative)
  caption=$(sed -n "${i}p" captions.txt)
  ## write it into the image's metadata
  exiftool -overwrite_original -Description="$caption" "$filename"
done
create type public.continents as enum (
  'Africa',
  'Antarctica',
  'Asia',
  'Europe',
  'Oceania',
  'North America',
  'South America'
);
create table public.countries (
  -- hedged completion: the snippet is cut off here; the columns are illustrative
  id bigint generated by default as identity primary key,
  name text not null,
  continent public.continents
);