This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash | |
set -eo pipefail | |
# https://stackoverflow.com/a/12194427/5257399 | |
create() { # fd base [qualifier [suffix [max]]] | |
local fd="$1" base="$2" qualifier="${3-}" suffix="${4-.png}" max="${5-}" | |
local n=0 file | |
local - # ash-style local scoping of options in 4.4+ | |
set -o noclobber | |
REPLY= |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash | |
set -eo pipefail | |
# https://stackoverflow.com/a/12194427/5257399 | |
create() { # fd base [qualifier [suffix [max]]] | |
local fd="$1" base="$2" qualifier="${3-}" suffix="${4-.png}" max="${5-}" | |
local n=0 file | |
local - # ash-style local scoping of options in 4.4+ | |
set -o noclobber | |
REPLY= |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from abc import ABC, abstractmethod | |
from typing import NamedTuple, Optional | |
from typing_extensions import override | |
import torch | |
from torch import Tensor, no_grad, enable_grad | |
import torch.autograd.forward_ad as fwAD | |
from torch.autograd.function import FunctionCtx | |
from torch.nn import Linear, Module | |
from torch.nn.attention import SDPBackend, sdpa_kernel | |
from torch.nn.functional import scaled_dot_product_attention |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
from torch import FloatTensor | |
def mm(a: FloatTensor, b: FloatTensor) -> FloatTensor: | |
assert a.ndim == 2 | |
assert b.ndim == 2 | |
assert a.size(-1) == b.size(-2) | |
assert a.size(-2) == b.size(-1) | |
# batched dot product | |
def bdp(a_row: FloatTensor, b: FloatTensor) -> FloatTensor: |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from typing import Optional | |
import torch | |
from torch import FloatTensor, BoolTensor, Tensor, inference_mode | |
from torch.func import functional_call, stack_module_state | |
from torch.nn import Module, Linear | |
from torch.nn.functional import scaled_dot_product_attention | |
from einops import rearrange | |
class Attention(Module): | |
def __init__( |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
javascript: (async function copyTags() { | |
const replacements = { | |
v: "peace sign", | |
"double v": "double peace", | |
"|_|": "bar eyes", | |
"\\||/": "opem \\m/", | |
":|": "neutral face", | |
";|": "neutral face", | |
"eyepatch bikini": "square bikini", | |
"tachi-e": "character image", |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from typing import NamedTuple, Sequence, Optional | |
import torch | |
from torch import FloatTensor, LongTensor | |
class DevicePlacement(NamedTuple):
    """Immutable record of where one process sits in a multi-process run.

    Field names follow the usual torch.distributed convention
    (rank / world size) — presumably consumed by a distributed training
    loop; confirm against the caller, which is outside this fragment.
    """
    global_rank: int  # index of this process; presumably 0-based and < world_size — TODO confirm
    world_size: int  # total number of participating processes
class GradAcc(NamedTuple): | |
acc_step: int |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import argparse | |
import math | |
from dataclasses import dataclass | |
from enum import Enum | |
from typing import Callable, Optional | |
import torch | |
from einops import rearrange | |
from torch import ( | |
BoolTensor, |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from enum import Enum | |
from typing import Callable, Optional, Any | |
from einops import rearrange | |
from dataclasses import dataclass | |
import math | |
import torch | |
from torch import FloatTensor, LongTensor, IntTensor, BoolTensor, ByteTensor, no_grad, inference_mode | |
from torch.nn import Embedding, Linear, Module | |
from torch.nn.attention.flex_attention import BlockMask, flex_attention, create_block_mask, _score_mod_signature, _mask_mod_signature | |
from torch.nn.functional import scaled_dot_product_attention |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Caught signal 11 (Segmentation fault: address not mapped to object at address 0x20) | |
==== backtrace (tid: 63632) ==== | |
0 0x0000000000042520 __sigaction() ???:0 | |
1 0x0000000006e9fe76 torch::jit::InterpreterStateImpl::callstack() interpreter.cpp:0 | |
2 0x0000000006ea0172 torch::jit::InterpreterStateImpl::handleError() interpreter.cpp:0 | |
3 0x0000000006eac9fb torch::jit::InterpreterStateImpl::runTemplate<false>() interpreter.cpp:0 | |
4 0x0000000006eb0585 torch::jit::InterpreterStateImpl::run() interpreter.cpp:0 | |
5 0x0000000006e897b3 torch::jit::GraphExecutorImplBase::run() graph_executor.cpp:0 | |
6 0x0000000000d3d859 torch::jit::runAndInsertCall() python_custom_class.cpp:0 | |
7 0x0000000000e4208b torch::jit::invokeScriptMethodFromPython() script_init.cpp:0 |
NewerOlder