I updated my package autolike today using the following steps:
- Change the version number in setup.py and modify the package as necessary.
- Assuming you're in the root of your project directory, run:
pip install -e .
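For reference, here is a minimal setup.py sketch showing where the version bump goes; the field values below are placeholders for illustration, not autolike's actual metadata:

# setup.py (illustrative sketch only)
from setuptools import setup, find_packages

setup(
    name='autolike',          # package name
    version='0.1.1',          # bump this line on every release
    packages=find_packages(),
)

Re-running pip install -e . afterwards refreshes the editable install so the new version number is picked up.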
! This is the most annoying one that Google optimizes for accidental clicks.
!
! Hide the 'People also search for' slide-in box when going back on Google Search.
! We require the other attributes here to reduce the risk of false positives.
www.google.com#$#div[id^="eob_"][jscontroller][jsdata][jsaction][data-ved] { display: none !important; }
! Hide the 'People also ask' box
www.google.com#$#div[data-initq][data-it][jscontroller][jsaction]:nth-ancestor(1) { display: none !important; }
! Hide the 'Related searches' box
def init_weights(m, variance=1.0):
    def _calculate_fan_in_and_fan_out(tensor):
        dimensions = tensor.dim()
        if dimensions < 2:
            return 1, 1
        if dimensions == 2:  # Linear
            fan_in = tensor.size(1)
            fan_out = tensor.size(0)
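A self-contained sketch of how a fan-in/fan-out helper like the one above is typically completed and used for variance-scaled initialization follows; it assumes PyTorch, and the scaling rule and layer types are illustrative guesses rather than the original's exact choices:

import math
import torch.nn as nn

def init_weights_sketch(m, variance=1.0):
    def _calculate_fan_in_and_fan_out(tensor):
        dimensions = tensor.dim()
        if dimensions < 2:
            return 1, 1
        if dimensions == 2:  # Linear: (out_features, in_features)
            fan_in, fan_out = tensor.size(1), tensor.size(0)
        else:  # Conv: multiply channel dims by the receptive field size
            receptive_field_size = tensor[0][0].numel()
            fan_in = tensor.size(1) * receptive_field_size
            fan_out = tensor.size(0) * receptive_field_size
        return fan_in, fan_out

    if isinstance(m, (nn.Linear, nn.Conv1d, nn.Conv2d, nn.Conv3d)):
        fan_in, _ = _calculate_fan_in_and_fan_out(m.weight)
        # Scale the normal init by fan-in so activation variance stays roughly constant
        m.weight.data.normal_(0.0, math.sqrt(variance / fan_in))
        if m.bias is not None:
            m.bias.data.zero_()

It would be applied to a whole network with model.apply(init_weights_sketch).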
#!/usr/bin/env python
'''Python implementation of Firth regression by John Lees
See https://www.ncbi.nlm.nih.gov/pubmed/12758140'''

import numpy as np
import statsmodels.api as sm

def firth_likelihood(beta, logit):
    # Penalized log-likelihood: log L(beta) + 0.5 * log|I(beta)|, negated for minimization
    return -(logit.loglike(beta) + 0.5*np.log(np.linalg.det(-logit.hessian(beta))))

# Do firth regression
# Note information = -hessian, for some reason available but not implemented in statsmodels
def fit_firth(y, X, start_vec=None, step_limit=1000, convergence_limit=0.0001):
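    # The body below is a sketch of the standard Firth-penalized Newton-Raphson
    # update, written as an assumption about how this function could work; it is
    # not necessarily John Lees' exact implementation (which also returns standard
    # errors and handles step-halving).
    logit = sm.Logit(y, X)
    beta = start_vec if start_vec is not None else np.zeros(X.shape[1])

    for _ in range(step_limit):
        pi = logit.predict(beta)                          # fitted probabilities
        W = np.diagflat(pi * (1 - pi))
        var_covar = np.linalg.inv(-logit.hessian(beta))   # inverse information matrix
        # Diagonal of the hat matrix H = W^1/2 X (X' W X)^-1 X' W^1/2
        root_W = np.sqrt(W)
        H = root_W @ X @ var_covar @ X.T @ root_W
        # Firth-adjusted score: X' (y - pi + h * (0.5 - pi))
        U = X.T @ (y - pi + np.diagonal(H) * (0.5 - pi))
        new_beta = beta + var_covar @ U
        if np.linalg.norm(new_beta - beta) < convergence_limit:
            return new_beta
        beta = new_beta

    return None  # did not converge within step_limit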
# This small script shows how to use AllenNLP Semantic Role Labeling (http://allennlp.org/) with SpaCy 2.0 (http://spacy.io) components and extensions
# Script installs allennlp default model
# Important: Install allennlp from source and replace the spacy requirement with spacy-nightly in the requirements.txt
# Developed for SpaCy 2.0.0a18
from allennlp.commands import DEFAULT_MODELS
from allennlp.common.file_utils import cached_path
from allennlp.service.predictors import SemanticRoleLabelerPredictor
from allennlp.models.archival import load_archive
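
# What follows is a rough sketch (not the original gist's code) of how the SRL
# predictor might be wired into a SpaCy pipeline. The DEFAULT_MODELS key,
# from_archive and predict_json calls follow the AllenNLP 0.x API as best
# recalled and may differ between releases; treat each call as an assumption.
from spacy.tokens import Doc

archive = load_archive(cached_path(DEFAULT_MODELS['semantic-role-labeling']))
predictor = SemanticRoleLabelerPredictor.from_archive(archive, 'semantic-role-labeling')

# Store the raw SRL output on each Doc via a custom extension attribute
Doc.set_extension('srl', default=None)

def srl_component(doc):
    doc._.srl = predictor.predict_json({"sentence": doc.text})
    return doc

# nlp.add_pipe(srl_component, last=True)  # attach to an existing spacy.load(...) pipeline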
from keras import backend as K, initializers, regularizers, constraints
from keras.engine.topology import Layer

def dot_product(x, kernel):
    """
    Wrapper for dot product operation, in order to be compatible with both
    Theano and Tensorflow
    Args:
        x (): input
        kernel (): weights
    Returns:
    """
    if K.backend() == 'tensorflow':
        # TF's dot of a 3D tensor with a 1D kernel needs an explicit expand/squeeze
        return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)
    else:
        return K.dot(x, kernel)
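As a usage illustration, a toy scoring layer built on dot_product might look like the sketch below; the SimpleScore class is made up for this example and is not the attention layer the wrapper originally came from:

class SimpleScore(Layer):
    """Scores each timestep of a (batch, timesteps, features) input with one learned vector."""
    def build(self, input_shape):
        self.kernel = self.add_weight(name='score_kernel',
                                      shape=(input_shape[-1],),
                                      initializer='glorot_uniform',
                                      trainable=True)
        super(SimpleScore, self).build(input_shape)

    def call(self, x):
        # Output shape: (batch, timesteps)
        return dot_product(x, self.kernel)

    def compute_output_shape(self, input_shape):
        return input_shape[0], input_shape[1]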
# Note – this is not a bash script (some of the steps require reboot)
# I named it .sh just so GitHub does correct syntax highlighting.
#
# This is also available as an AMI in us-east-1 (virginia): ami-cf5028a5
#
# The CUDA part is mostly based on this excellent blog post:
# http://tleyden.github.io/blog/2014/10/25/cuda-6-dot-5-on-aws-gpu-instance-running-ubuntu-14-dot-04/

# Install various packages
sudo apt-get update