Example of using OpenAI functions in completions with Python decorators

This example illustrates a way to call a function dynamically while querying an OpenAI GPT model. It uses the recently released function-calling support in OpenAI's chat completion endpoints.

The general idea is to use a decorator to extract information from a function so it can be presented to the language model for use, and then to pass the function's result back to the completion endpoint so the model can turn it into a natural-language answer.

In general, a wide variety of functions can be swapped in for use by the model. By replacing the get_top_stories function and the prompt passed to ai() at the bottom of the script, you should be able to get the model to run your own function without changing any of the other code.
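For example, given the collatz_sequence function defined later in the script, the decorator derives roughly the following structure (shown here as a Python dict for illustration; the script never prints it). This is what gets passed in the functions parameter of the chat completion request so the model can decide whether and how to call the function:

{
    "name": "collatz_sequence",
    "description": "Generates the Collatz sequence for a given number.",
    "parameters": {
        "type": "object",
        "properties": {
            "n": {
                "type": "integer",
                "description": "The starting number of the sequence."
            }
        },
        "required": ["n"]
    },
    "return_type": "list[int]"
}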

Configuration

To use this, create a config.py file and add a variable with your OpenAI token:

# tokens
openai_token = "sk-<token_string>"

Ensure you have the OpenAI library installed:

pip3 install openai
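Note that this script uses the legacy openai.ChatCompletion interface, which was removed in version 1.0 of the openai package. If the command above installs a newer release, pinning an older version should work:

pip3 install "openai<1.0"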

Running

To run the example, do the following:

python3 top_hackernews.py

Sample Output

A sample run, asking for AI stories:

kord@bob PythonGPT $ python3 hackernews_top10.py
What type of HackerNews stories are you looking for? AI
Calling get_top_stories with: 20.
1. Swing VPN app is a DDoS botnet - https://lecromee.github.io/posts/swing_vpn_ddosing_sites/
2. Goodbye, Twilio - https://blog.miguelgrinberg.com/post/goodbye-twilio
3. OpenLLM - https://github.com/bentoml/OpenLLM
4. *Show HN: Answer Overflow – Indexing Discord content into the web* - https://www.answeroverflow.com/
5. HDR QR Code - https://notes.dt.in.th/HDRQRCode
6. *Keycloak – Open-source identity and access management interview* - https://console.substack.com/i/128451029/interview-with-michal-of-keycloak-open-source-identity-and-access-management-for-modern-applications
7. We tried to book a train ticket and ended up with a 245k records data breach - https://zerforschung.org/posts/freundschaftspass-en/
8. Releasing an indie game on 3 consoles at once and failing financially (2016) - https://juicybeast.com/2016/01/11/releasing-an-indie-game-on-3-consoles-at-once-and-failing-financially/#2-years
9. Reddit CEO Triples Down, Insults, Whines - https://www.techdirt.com/2023/06/16/reddit-ceo-triples-down-insults-protesters-whines-about-not-making-enough-money-from-reddit-users/
10. Soviet Union sold titanium to US believing they needed it for pizza ovens - https://theaviationgeekclub.com/in-the-early-1960s-soviet-union-sold-titanium-to-the-us-believing-they-needed-it-for-pizza-ovens-but-instead-they-used-it-to-build-the-iconic-sr-71-blackbird-mach-3-spy-plane/

The AI-related entries are marked with a star (*) in front of the title.
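The full script follows.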
import sys
import json
import requests
from datetime import datetime
import ast
import inspect
import openai

class FunctionWrapper:
    def __init__(self, func):
        self.func = func
        self.info = self.extract_function_info()

    def extract_function_info(self):
        source = inspect.getsource(self.func)
        tree = ast.parse(source)

        # Extract function name
        function_name = tree.body[0].name

        # Extract function description from docstring
        function_description = self.extract_description_from_docstring(self.func.__doc__)

        # Extract function arguments and their types
        args = tree.body[0].args
        parameters = {"type": "object", "properties": {}}
        for arg in args.args:
            argument_name = arg.arg
            argument_type = self.extract_parameter_type(argument_name, self.func.__doc__)
            parameter_description = self.extract_parameter_description(argument_name, self.func.__doc__)
            parameters["properties"][argument_name] = {
                "type": argument_type,
                "description": parameter_description,
            }

        # Extract function return type
        return_type = None
        if tree.body[0].returns:
            return_type = ast.get_source_segment(source, tree.body[0].returns)

        function_info = {
            "name": function_name,
            "description": function_description,
            "parameters": {
                "type": "object",
                "properties": parameters["properties"],
                "required": list(parameters["properties"].keys()),
            },
            "return_type": return_type,
        }
        return function_info

    def extract_description_from_docstring(self, docstring):
        if docstring:
            lines = docstring.strip().split("\n")
            description_lines = []
            for line in lines:
                line = line.strip()
                if line.startswith(":param") or line.startswith(":type") or line.startswith(":return"):
                    break
                if line:
                    description_lines.append(line)
            return "\n".join(description_lines)
        return None

    def extract_parameter_type(self, parameter_name, docstring):
        if docstring:
            type_prefix = f":type {parameter_name}:"
            lines = docstring.strip().split("\n")
            for line in lines:
                line = line.strip()
                if line.startswith(type_prefix):
                    return line.replace(type_prefix, "").strip()
        return None

    def extract_parameter_description(self, parameter_name, docstring):
        if docstring:
            param_prefix = f":param {parameter_name}:"
            lines = docstring.strip().split("\n")
            for line in lines:
                line = line.strip()
                if line.startswith(param_prefix):
                    return line.replace(param_prefix, "").strip()
        return None

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    def function(self):
        return self.info


def function_info(func):
    return FunctionWrapper(func)

def ai(function_name="", query=""):
    function_function = globals().get(function_name)

    # Step 1, send the user query and the function description to the model
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=[{"role": "user", "content": query}],
        functions=[function_function.function()],
        function_call="auto",
    )
    message = response["choices"][0]["message"]

    # Step 2, check if the model wants to call a function
    if message.get("function_call"):
        function_name = message["function_call"]["name"]
        function_function = globals().get(function_name)

        # test we have the function
        if function_function is None:
            print("Couldn't find the function!")
            sys.exit()

        # Step 3, get the function information using the decorator
        function_info = function_function.function()

        # Extract function call arguments from the message
        # Note: the arguments string returned by the model may not be valid JSON
        function_call_args = json.loads(message["function_call"]["arguments"])

        # Filter function call arguments based on available properties
        filtered_args = {}
        for arg, value in function_call_args.items():
            if arg in function_info["parameters"]["properties"]:
                filtered_args[arg] = value

        # Step 4, call the function
        function_response = function_function(**filtered_args)

        # Step 5, send the model the info on the function call and function response
        second_response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo-0613",
            messages=[
                {"role": "user", "content": query},
                message,
                {
                    "role": "function",
                    "name": function_name,
                    "content": json.dumps(function_response)
                },
            ],
        )
        return second_response

    # If the model did not ask for a function call, return the first response as-is
    return response

@function_info
def collatz_sequence(n: int) -> list[int]:
    """
    Generates the Collatz sequence for a given number.
    :param n: The starting number of the sequence.
    :type n: integer
    :return: list of integers of the sequence.
    :rtype: list[int]
    Example:
    >>> collatz_sequence(6)
    [6, 3, 10, 5, 16, 8, 4, 2, 1]
    """
    try:
        n = int(n)
    except (TypeError, ValueError):
        n = 6
    sequence = [n]
    while n != 1:
        if n % 2 == 0:
            n = n // 2
        else:
            n = 3 * n + 1
        sequence.append(n)
    return sequence

### replace this with any function and change the function name in the ai() call below
##
@function_info
def get_top_stories(num_stories: int) -> dict[str, str]:
    """
    Fetches the top stories from Hacker News using Algolia's search API.
    :param num_stories: The number of top stories to fetch.
    :type num_stories: integer
    :return: A dictionary containing the top stories with their titles and URLs.
    :rtype: dict[str, str]
    """
    base_url = "https://hn.algolia.com/api/v1/search"
    params = {
        "tags": "story",
        "numericFilters": "created_at_i>{0},created_at_i<{1}".format(
            int(datetime.now().timestamp()) - 86400,  # 24 hours ago
            int(datetime.now().timestamp())           # current timestamp
        ),
        "hitsPerPage": num_stories
    }
    response = requests.get(base_url, params=params)
    if response.status_code == 200:
        data = response.json()

        # Create a dictionary to store the stories
        document = {}

        # Iterate over the top stories and extract title and URL
        for index, hit in enumerate(data["hits"], start=1):
            title = hit["title"]
            url = hit["url"]
            document[index] = {"title": title, "url": url}
        return document
    else:
        return None
#
##
### end custom function

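# A sketch of swapping in a different function (hypothetical example, not used by
# this script): any function documented with the same :param / :type / :return
# style can be decorated with @function_info and handed to ai() by name, e.g.:
#
#   @function_info
#   def get_weather(city: str) -> dict[str, str]:
#       """
#       Returns a canned weather report for a city.
#       :param city: The city to report on.
#       :type city: string
#       :return: A dictionary with a single "report" key.
#       :rtype: dict[str, str]
#       """
#       return {"report": f"It is sunny in {city}."}
#
#   ai("get_weather", "What's the weather like in Paris?")
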
# add token to openai
from config import openai_token
openai.api_key = openai_token

# user input
query = input("What type of HackerNews stories are you looking for? ")

# run the conversation and print each reply from the model to the console
for choice in ai("get_top_stories", f"Looking at the top 20 HackerNews stories, show the title and a URL in a list for console output, and mark {query} related entries with a star (*). Do not put links in brackets or parentheses. \n").get('choices'):
    print(choice.get('message').get('content'))