Skip to content

Instantly share code, notes, and snippets.

@kyo-takano
Last active July 25, 2023 05:27
Show Gist options
  • Save kyo-takano/0c7bf0479158aa137e0ba935dec70461 to your computer and use it in GitHub Desktop.
0c7bf0479158aa137e0ba935dec70461
Display the source blob
Display the rendered blob
Raw
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"gpuType": "T4"
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "cIe9XY2UDYKs"
},
"outputs": [],
"source": [
"# Install the quantization + Hugging Face libraries.\n",
"# %pip (rather than pip in a %%sh subshell) guarantees the packages are\n",
"# installed into the same Python environment the notebook kernel runs in.\n",
"%pip install -q -U bitsandbytes\n",
"%pip install -q -U git+https://github.com/huggingface/transformers.git\n",
"%pip install -q -U git+https://github.com/huggingface/accelerate.git"
]
},
{
"cell_type": "code",
"source": [
"import torch\n",
"from transformers import AutoTokenizer, AutoModelForCausalLM\n",
"\n",
"MODEL_ID = \"kyo-takano/open-calm-7b-8bit\"\n",
"\n",
"# device_map=\"auto\" lets accelerate place the quantized weights on the GPU\n",
"# (this notebook's runtime declares a T4 accelerator); without it the model\n",
"# stays on CPU and model.device below would be \"cpu\".\n",
"model = AutoModelForCausalLM.from_pretrained(MODEL_ID, device_map=\"auto\")\n",
"tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)\n",
"\n",
"# Move input tensors to wherever the model weights were placed.\n",
"inputs = tokenizer(\"AIによって私達の暮らしは、\", return_tensors=\"pt\").to(model.device)\n",
"\n",
"# Inference only: no_grad avoids building an autograd graph.\n",
"with torch.no_grad():\n",
"    tokens = model.generate(\n",
"        **inputs,\n",
"        max_new_tokens=64,\n",
"        do_sample=True,  # stochastic sampling; output varies between runs\n",
"        temperature=0.7,\n",
"        top_p=0.9,\n",
"        repetition_penalty=1.05,\n",
"        pad_token_id=tokenizer.pad_token_id,\n",
"    )\n",
"\n",
"output = tokenizer.decode(tokens[0], skip_special_tokens=True)\n",
"print(output)"
],
"metadata": {
"id": "wms9wdoGDgee"
},
"execution_count": null,
"outputs": []
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment