# Based on discussions at https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf/discussions/10
# Setup:
#   pip install git+https://github.com/huggingface/transformers.git@main
#   pip install tokenizers transformers
# Use a pipeline as a high-level helper
from transformers import pipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, AutoConfig

# Load the CodeLlama-7B *Instruct* checkpoint and its matching tokenizer.
tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-Instruct-hf")
model = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-Instruct-hf")

# Create a text-generation pipeline from the preloaded model/tokenizer pair.
code_generator = pipeline('text-generation', model=model, tokenizer=tokenizer)

# Generate code for an input string.
input_string = "Write a python function to calculate the factorial of a number"

# Instruct checkpoints are trained on the [INST] ... [/INST] chat template;
# without the wrapper the model tends to *continue* the prompt text instead
# of answering the instruction. (The tokenizer adds the BOS token itself, so
# no explicit <s> prefix is needed.)
prompt = f"[INST] {input_string} [/INST]"

# max_new_tokens bounds only the generated continuation; max_length (used
# before) also counts the prompt tokens and is the deprecated way to limit
# generation, so long prompts could silently leave no room for output.
generated_code = code_generator(prompt, max_new_tokens=100)[0]['generated_text']
print(generated_code)