Recently, GitHub introduced a change in how ATX headers are parsed in Markdown files.
##Wrong
While this change follows the spec, it breaks many existing repositories. I took the README dataset which we created at source{d} and ran a simple
| from tensorboard.backend.event_processing import event_accumulator | |
| import tensorflow as tf | |
| import glob | |
| import pandas as pd | |
| tf.logging.set_verbosity(tf.logging.ERROR) | |
| basedir = "/path/to/log/directory/" | |
| def load_tf(dirname): | |
| prefix = basedir + "tboard/VisibleSwimmer-v2/" |
| #! /bin/bash | |
| # [get_golang.sh](https://gist.github.com/n8henrie/1043443463a4a511acf98aaa4f8f0f69) | |
| # Download latest Golang release for AMD64 | |
| # https://dl.google.com/go/go1.10.linux-amd64.tar.gz | |
| set -euf -o pipefail | |
| # Install pre-reqs | |
| sudo apt-get install python3 git -y | |
| o=$(python3 -c $'import os\nprint(os.get_blocking(0))\nos.set_blocking(0, True)') |
| wifi_adb() { | |
| PORT="$1" | |
| if [ -z "$PORT" ]; then | |
| PORT="5" | |
| fi | |
| re='^[0-9]+$' | |
| if ! [[ $PORT =~ $re ]] ; then | |
| echo "error: Not a number" >&2; return 1 | |
| fi |
| # Lazy dict allows storing function and argument pairs when initializing the dictionary, | |
| # it calculates the value only when fetching it. | |
| # In this example, if the key starts with '#', it would accept a (function, args) tuple as value and | |
| # returns the calculated result when fetching the values. | |
| from collections.abc import Mapping | |
| class LazyDict(Mapping): | |
| def __init__(self, *args, **kw): |
| # This small script shows how to use AllenNLP Semantic Role Labeling (http://allennlp.org/) with SpaCy 2.0 (http://spacy.io) components and extensions | |
| # Script installs allennlp default model | |
| # Important: Install allennlp from source and replace the spacy requirement with spacy-nightly in the requirements.txt | |
| # Developed for SpaCy 2.0.0a18 | |
| from allennlp.commands import DEFAULT_MODELS | |
| from allennlp.common.file_utils import cached_path | |
| from allennlp.service.predictors import SemanticRoleLabelerPredictor | |
| from allennlp.models.archival import load_archive |
| diff --git a/app.py b/app.py | |
| index a2b76f9..cd4c055 100755 | |
| --- a/app.py | |
| +++ b/app.py | |
| @@ -198,7 +198,8 @@ class SplashScreen(wx.SplashScreen): | |
| self.control = Controller(self.main) | |
| self.fc = wx.FutureCall(1, self.ShowMain) | |
| - wx.FutureCall(1, parse_comand_line) | |
| + options, args = parse_comand_line() |
Recently, GitHub introduced a change in how ATX headers are parsed in Markdown files.
##Wrong
While this change follows the spec, it breaks many existing repositories. I took the README dataset which we created at source{d} and ran a simple
| #!/bin/sh | |
| if [ "$PYENV_VERSION" -ne "" ] | |
| then | |
| name=`pyenv version-name` | |
| python=`pyenv which python` | |
| else | |
| name=`basename "$VIRTUAL_ENV"` | |
| python="$VIRTUALENV/bin/python" | |
| fi |
| from joblib import Parallel, delayed | |
| import Queue | |
| import os | |
| # Define number of GPUs available | |
| N_GPU = 4 | |
| # Put indices in queue | |
| q = Queue.Queue(maxsize=N_GPU) | |
| for i in range(N_GPU): |
| #!/usr/bin/env bash | |
| # This script is useful if: | |
| # - you have a manuscript that you want to upload to the arXiv, | |
| # - you are using biblatex, and | |
| # - you are using a recent version of texlive while arXiv is still on texlive2011 | |
| # | |
| # Put this file in a directory containing the manuscript you want to | |
| # upload to arXiv.org, and adapt the paths below. |