I hereby claim:
- I am haletom on github.
- I am tomhale (https://keybase.io/tomhale) on keybase.
- I have a public key ASA0UCq38IWRstxf9o5_HXYCzXjb7paM7SF7QfHRykoxsAo
To claim this, I am signing this object:
# Print the line and filename, function call, the class, str representation and some other info
# Inspired by https://stackoverflow.com/a/8856387/5353461
import inspect | |
import re | |
def describe(arg): | |
frame = inspect.currentframe() | |
callerframeinfo = inspect.getframeinfo(frame.f_back) |
% Based upon https://stackoverflow.com/a/45347880/5353461
% Gist at: https://gist.github.com/HaleTom/533b0ed7c51f93bfb5f71007a188bac4
function varargout = describe(varargin) | |
% varargin used to accommodate variable number of input names | |
st = dbstack; | |
outstring = ''; | |
for ii = size(st, 1):-1:2 | |
outstring = [outstring, st(ii).file, ' > ', st(ii).name, ', line ', num2str(st(ii).line), '\n']; | |
end |
function [J grad] = nnCostFunction(nn_params, ... | |
input_layer_size, ... | |
hidden_layer_size, ... | |
num_labels, ... | |
X, Y, lambda) | |
%NNCOSTFUNCTION Implements the neural network cost function for a two layer
%neural network which performs classification
%   [J grad] = NNCOSTFUNCTION(nn_params, input_layer_size, hidden_layer_size, ...
%   num_labels, X, y, lambda) computes the cost and gradient of the neural
%   network. The parameters for the neural network are "unrolled" into the vector
I hereby claim:
To claim this, I am signing this object:
#!/bin/bash | |
# Update the bt-tracker= line in aria2.conf
# Any bt-tracker= lines are removed and a new one added at the bottom of the file
# Updates at: https://gist.github.com/HaleTom/fe873dc2f3c5bd14f7418efefc2b91a8
# Inspiration: https://github.com/wuyuansushen/aria2c_TrackersList
set -euo pipefail | |
shopt -s failglob |