(C-x means ctrl+x, M-x means alt+x)
The default prefix is C-b. If you (or your muscle memory) prefer C-a, you need to add this to ~/.tmux.conf:
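A common version of that remapping (three lines; adjust to taste):

# remap prefix from C-b to C-a
unbind C-b
set -g prefix C-a
bind C-a send-prefix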
import logging
import pika
from threading import Timer

BATCH_SIZE = 100
LOG_FORMAT = ('%(levelname) -10s %(asctime)s %(name) -30s %(funcName) '
              '-35s %(lineno) -5d: %(message)s')
LOGGER = logging.getLogger(__name__)
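Presumably these get wired up along these lines before the consumer starts; this is a sketch, not from the original file, and the host name is a placeholder:

logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()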
#! /usr/bin/env python
# Standard library imports.
from SocketServer import ThreadingMixIn
import BaseHTTPServer
import SimpleHTTPServer
import sys
import json
import os
from os.path import (join, exists, dirname, abspath, isabs, sep, walk, splitext)
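These imports are the usual ingredients of a multi-threaded static file server (Python 2 module names); a minimal sketch of how they typically combine, not taken from the original script and with an arbitrary port:

class ThreadingHTTPServer(ThreadingMixIn, BaseHTTPServer.HTTPServer):
    """Handle each request in its own thread."""

if __name__ == '__main__':
    httpd = ThreadingHTTPServer(('0.0.0.0', 8000),
                                SimpleHTTPServer.SimpleHTTPRequestHandler)
    httpd.serve_forever()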
import argparse
import os
import shutil
import time

import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
from torch.nn import Parameter

class SelfAttention(nn.Module):
    def __init__(self, attention_size, batch_first=False, non_linearity="tanh"):
        super(SelfAttention, self).__init__()
        self.batch_first = batch_first
        # one learnable score weight per feature dimension
        self.attention_weights = Parameter(torch.FloatTensor(attention_size))
        self.softmax = nn.Softmax(dim=-1)
        if non_linearity == "relu":
            self.non_linearity = nn.ReLU()
        else:
            self.non_linearity = nn.Tanh()
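    # Not part of the original snippet: the class above is cut off after __init__,
    # so this forward() is only a sketch of what usually pairs with it, assuming
    # inputs of shape (batch, seq_len, attention_size) and no padding mask.
    def forward(self, inputs):
        if not self.batch_first:
            inputs = inputs.transpose(0, 1)   # -> (batch, seq_len, attention_size)
        # score every timestep against the learned attention vector
        scores = self.non_linearity(inputs.matmul(self.attention_weights))
        attentions = self.softmax(scores)     # (batch, seq_len), sums to 1 over time
        # attention-weighted sum over the time dimension
        representations = (inputs * attentions.unsqueeze(-1)).sum(dim=1)
        return representations, attentions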
:80 {
    root /serve
}
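That is Caddy v1 syntax, where serving static files from the root is the default behavior; if it is meant for Caddy v2, the equivalent needs an explicit file_server directive (same port and path assumed):

:80 {
    root * /serve
    file_server
}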
# all imports
from IPython.display import Javascript
from google.colab import output
from base64 import b64decode
from io import BytesIO
!pip -q install pydub
from pydub import AudioSegment

RECORD = """
const sleep = time => new Promise(resolve => setTimeout(resolve, time))
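The RECORD string is cut off above; in the usual version of this snippet it goes on to define a JavaScript record(ms) function that resolves to a base64 data URL of the captured audio. Assuming that, the Python side typically looks something like this (the helper name and 3-second default are illustrative):

from IPython.display import display

def record(seconds=3):
    display(Javascript(RECORD))                        # install the JS recorder in the page
    data_url = output.eval_js('record(%d)' % (seconds * 1000))
    audio_bytes = b64decode(data_url.split(',')[1])    # drop the "data:...;base64," prefix
    return AudioSegment.from_file(BytesIO(audio_bytes))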
import os
import re
import sys
import glob
import nltk
import gensim
import numpy as np
import pandas as pd
from tqdm import tqdm
from uuid import uuid4
import gc
import gzip
import time
import json
import shutil
import os
import sys
import tldextract
import collections
import pandas as pd
from tqdm import tqdm