Skip to content

Instantly share code, notes, and snippets.

View joeyism's full-sized avatar

Joey joeyism

View GitHub Profile
# NOTE(review): duplicated preview of the main.py gist shown further below.
# `gpt2` is a project-local module (not on PyPI as used here) -- presumably
# it loads a GPT-2 model; verify against the gist's gpt2.py.
import gpt2
predictor = gpt2.Gpt2Predictor()
@joeyism
joeyism / main.py
Last active October 9, 2019 01:50
import gpt2
# NOTE(review): Gpt2Predictor comes from the project-local `gpt2` module,
# which is not visible on this page -- presumably it wraps model loading
# and sampling; confirm against the gist's source.
predictor = gpt2.Gpt2Predictor()
# Continue the prompt supplied under the "previous" key (the only key this
# call demonstrates; the full request schema is not visible here).
result = predictor.predict_json({"previous": "Toronto Raptors, who are currently tied for the league leader in wins"})
# Sample of the returned payload as pasted into the gist. The preview
# truncates it, so the dict literal below is NOT syntactically complete
# and is kept only as illustrative output.
{"logits": [-166.17694091796875,
-166.25209045410156,
-166.281494140625,
-166.30458068847656,
-166.33673095703125,
-166.34226989746094,
-166.35443115234375,
-166.36343383789062,
-166.37081909179688,
-166.40570068359375],
@joeyism
joeyism / main.py
Last active October 9, 2019 02:07
import string
import copy
import gpt2
predictor = gpt2.Gpt2Predictor()
def predict_until_punctuation(input_str):
    """Grow *input_str* with model output, presumably until punctuation.

    NOTE(review): the gist preview truncates this function -- only the
    input validation and the initial copy are visible; the generation
    loop that the name suggests follows is cut off here. Confirm against
    the full gist before relying on this docstring.
    """
    if not input_str:
        raise Exception('input string required')
    # Deep-copy so the caller's argument is never mutated in place.
    output_str = copy.deepcopy(input_str)
import torch.multiprocessing as mp
QUEUE_SIZE = mp.Value('i', 0)
def _decrease_queue():
with QUEUE_SIZE.get_lock():
QUEUE_SIZE.value -= 1
def _increase_queue():
    # NOTE(review): truncated by the gist preview -- the statement under the
    # lock (presumably `QUEUE_SIZE.value += 1`, mirroring _decrease_queue)
    # is cut off, so this definition is incomplete as shown.
    with QUEUE_SIZE.get_lock():
import torch.multiprocessing as mp
QUEUE_SIZE = mp.Value('i', 0)
def _decrease_queue():
with QUEUE_SIZE.get_lock():
QUEUE_SIZE.value -= 1
def _increase_queue():
    # NOTE(review): duplicated, truncated preview -- the body of the `with`
    # block (presumably `QUEUE_SIZE.value += 1`) is cut off by the preview.
    with QUEUE_SIZE.get_lock():
from transformers import GPT2Tokenizer, GPT2LMHeadModel
import torch
import numpy as np

# Directory holding the fine-tuned model/tokenizer artifacts.
# NOTE(review): GPT2LMHeadModel and np are unused in the visible portion --
# presumably used further down in the (truncated) gist.
OUTPUT_DIR = "./output"

# Prefer GPU when one is available, otherwise stay on CPU.
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# Load the tokenizer saved alongside the fine-tuned model.
tokenizer = GPT2Tokenizer.from_pretrained(OUTPUT_DIR)
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@joeyism
joeyism / deploy_aws.sh
Created November 7, 2022 15:50
Deploying Flask to AWS
#!/bin/bash
# Interactive setup for deploying a Flask app behind gunicorn on AWS.
# NOTE(review): the script continues past this preview -- only the
# prompt/collection phase is visible here.

# Discover this machine's public IP via OpenDNS; used as the default
# server name if the user presses Enter at the prompt below.
CURRENT_ID=$(dig +short myip.opendns.com @resolver1.opendns.com)
read -p "Server Name [$CURRENT_ID]: " SERVER_NAME
# Fall back to the detected public IP when no name was entered.
SERVER_NAME=${SERVER_NAME:-${CURRENT_ID}}
# Default the working directory to wherever the script is run from
# (-e enables readline path completion at the prompt).
CURRENT_WORK_DIR=$(pwd)
read -e -p "Working Directory [$CURRENT_WORK_DIR]: " WORKING_DIR
WORKING_DIR=${WORKING_DIR:-${CURRENT_WORK_DIR}}
# Path to the gunicorn binary; no default is provided here.
read -e -p "GUnicorn Executable (i.e. /usr/bin/gunicorn): " GUNICORN_EXEC