James Bradbury (@jekbradbury)

using InteractiveUtils
const Compiler = Core.Compiler

# expect exactly one method match for `types`, then fetch the checked method
function method_match_to_ir(methds, types)
    if length(methds) != 1
        @show methds
        @assert false
    end
    x = methds[1]
    meth = Core.Main.Base.func_for_method_checked(x[3], types)
@maleadt
maleadt / tinycassette.jl
Last active April 12, 2018 10:47
Reimplementation of Cassette's core mechanics
module Cassette

using Logging

# wrapper for overdubbing functions
struct Overdub{F,C}
    func::F
    context::C
    Overdub(f::F, c::C=nothing) where {F,C} = new{F,C}(f, c)
end
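
The preview cuts off before the part that makes Overdub callable. As a hedged sketch of that next step (this method is illustrative, not the gist's actual implementation, and it omits the real Cassette mechanic of rewriting the callee's lowered code so nested calls are overdubbed too):

# hypothetical sketch: an overdubbed call logs and then forwards to the wrapped function
function (o::Overdub)(args...)
    @info "overdubbed call" f = o.func
    return o.func(args...)
end

# e.g. Overdub(sin)(1.0) would log the call and return sin(1.0)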
@tbeason
tbeason / fulltabular.jl
Created January 22, 2018 17:02
a more-featured LaTeX table printing function
using LatexPrint

function fulltabular(A::AbstractArray{T,2}; placement::String="htb", rounding::Int=0,
                     colnames::Vector{String}=Vector{String}(), frontmatter::String="",
                     caption::String="", title::String="", label::String="",
                     strfirstcol::Bool=false, arraystretch=1) where {T<:Any}
    (r, c) = size(A)
    println("\\begin{table}[", placement, "] \\centering")
    println("\\renewcommand{\\arraystretch}{", arraystretch, "}")
    println(frontmatter)
    if !isempty(title)
        println("\\caption{", title, "}")
@JonathanRaiman
JonathanRaiman / human.cpp
Last active November 27, 2018 02:25
Code auto generated by Dali
auto a = op::uniform(-20.0, 20.0, {2, 5}).astype(dtype);
a.eval();
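// numerically stable softmax: subtract the row-wise max before exponentiating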
auto exped = op::exp(a - op::max(a, {-1}, true));
auto fused_softmax = exped / op::sum(exped, {-1}, true);
# To ensure correct alignment (e.g. for an 80-bit type)
struct StrWrap{T}
    value::T
end

function unsafe_reinterpret(T, a::A) where {A}
    if sizeof(T) <= sizeof(A)
        r = Ref(a)
        Base.@gc_preserve r begin
            u = convert(Ptr{T}, Base.unsafe_convert(Ptr{A}, r))
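
A hypothetical call, assuming the truncated body finishes by unsafe_load-ing the converted pointer, so the helper behaves like reinterpret for bits types:

bits = unsafe_reinterpret(UInt64, 1.0)   # same bits as reinterpret(UInt64, 1.0)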
@apaszke
apaszke / Rop.py
Last active January 16, 2023 07:20
import torch

def Rop(y, x, v):
    """Computes an Rop (Jacobian-vector product J @ v) via double backward.

    Arguments:
        y (Variable): output of differentiated function
        x (Variable): differentiated input
        v (Variable): vector to be multiplied with the Jacobian from the right
    """
    w = torch.ones_like(y, requires_grad=True)
    # differentiate the vector-Jacobian product w^T J with respect to w, which
    # yields J v; create_graph=True keeps the first grad differentiable
    return torch.autograd.grad(torch.autograd.grad(y, x, w, create_graph=True), w, v)
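
A quick usage check (shapes chosen for illustration):

x = torch.randn(3, requires_grad=True)
y = x ** 2                     # elementwise square, so the Jacobian is diag(2x)
v = torch.ones(3)
jvp, = Rop(y, x, v)            # equals 2 * x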
@zou3519
zou3519 / hang.py
Created October 19, 2017 02:17
This script gets stuck, but only on some machines...
from torch import nn
from torch.autograd import Variable
import torch
l = nn.Linear(5,5).cuda()
pl = nn.DataParallel(l)
print("Checkpoint 1")
a = Variable(torch.rand(5,5).cuda(), requires_grad=True)
print("Checkpoint 2")
print(pl(a)) # Here it gets stuck

Status quo

Currently the AbstractArray type hierarchy has three major subtype trees:

  • DenseArray
  • AbstractSparseArray
  • AbstractRange

In addition, we have the StridedArray typealias, which effectively “adds” strided SubArrays and ReshapedArrays as pseudo-subtypes of DenseArrays.

We also have the IndexStyle trait, which distinguishes arrays with fast linear indexing (IndexLinear) from those that prefer cartesian indexing (IndexCartesian).
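
For concreteness, a few REPL checks illustrate how these pieces relate (verified against a recent 1.x Julia, where the sparse types live in the SparseArrays stdlib):

using SparseArrays

Vector{Float64} <: DenseArray                  # true
SparseMatrixCSC <: AbstractSparseArray         # true
UnitRange{Int}  <: AbstractRange               # true

# StridedArray is a Union typealias, not an abstract type, so strided views
# participate in strided dispatch without being subtypes of DenseArray:
view(rand(4, 4), 1:2, :) isa StridedArray      # true
view(rand(4, 4), 1:2, :) isa DenseArray        # false

# The IndexStyle trait records the preferred indexing scheme:
IndexStyle(Vector{Float64})                    # IndexLinear()
IndexStyle(SparseMatrixCSC{Float64,Int})       # IndexCartesian()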

@simonbyrne
simonbyrne / fisher.jl
Created October 4, 2017 05:25
Computing Fisher information via forward-mode automatic differentiation
using Distributions
import ForwardDiff
import ForwardDiff: Dual, value, partials

# extract the primal values from a tuple of duals, elementwise
@generated function get_values(a::NTuple{N}) where {N}
    return ForwardDiff.tupexpr(i -> :(value(a[$i])), N)
end

ForwardDiff.value(p::ForwardDiff.Partials) =
    ForwardDiff.Partials(get_values(p.values))
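
The rest of the gist is not shown. As a separate hedged sketch of the underlying idea (not the gist's own code): the observed information is the negative Hessian of the log-likelihood, which ForwardDiff computes with nested duals, and its expectation over the data is the Fisher information.

using Distributions, ForwardDiff

# observed information at θ = (μ, σ) for a single observation x; averaging over
# draws of x (or taking the expectation analytically) gives the Fisher information
obs_info(θ, x) = ForwardDiff.hessian(t -> -logpdf(Normal(t[1], t[2]), x), θ)

obs_info([0.0, 1.0], 0.3)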
import torch
from torch.autograd import Variable

leaves = [Variable(torch.zeros(5, 5), requires_grad=True) for _ in range(10)]
intermediates = [l + i for i, l in enumerate(leaves)]
loss = sum(v * i for i, v in enumerate(intermediates)).sum()

# define a helper for dividing intermediates into groups
def group(l, group_size):
    """Groups l into chunks of size group_size.