|
import ast
import inspect
import json
import sys
import textwrap
from datetime import datetime

import openai
import requests
|
|
|
class FunctionWrapper:
    """Wrap a callable and derive an OpenAI function-calling schema for it.

    The schema (name, description, parameters, return type) is built once at
    construction time by parsing the function's source with ``ast`` and
    reading its reST-style docstring fields (``:param``/``:type``).
    """

    def __init__(self, func):
        # Keep the callable and build its schema eagerly so repeated
        # .function() calls are free.
        self.func = func
        self.info = self.extract_function_info()

    def extract_function_info(self):
        """Return a dict describing the wrapped function.

        :return: ``{"name", "description", "parameters", "return_type"}``
            shaped for the OpenAI ``functions=[...]`` argument.
        """
        # Dedent so source of methods / nested functions parses cleanly —
        # inspect.getsource preserves the original indentation, which would
        # otherwise make ast.parse raise IndentationError.
        source = textwrap.dedent(inspect.getsource(self.func))
        tree = ast.parse(source)
        func_node = tree.body[0]  # the FunctionDef itself

        function_name = func_node.name

        # Free-text summary taken from the docstring, before any field markers.
        function_description = self.extract_description_from_docstring(self.func.__doc__)

        # One JSON-schema property per positional argument, typed/described
        # from the docstring (values are None when the docstring omits them).
        properties = {}
        for arg in func_node.args.args:
            argument_name = arg.arg
            properties[argument_name] = {
                "type": self.extract_parameter_type(argument_name, self.func.__doc__),
                "description": self.extract_parameter_description(argument_name, self.func.__doc__),
            }

        # Return annotation, verbatim from source (None when unannotated).
        return_type = None
        if func_node.returns:
            return_type = ast.get_source_segment(source, func_node.returns)

        return {
            "name": function_name,
            "description": function_description,
            "parameters": {
                "type": "object",
                "properties": properties,
                # Every declared argument is treated as required.
                "required": list(properties.keys()),
            },
            "return_type": return_type,
        }

    def extract_description_from_docstring(self, docstring):
        """Collect the free-text lines preceding the first reST field marker.

        :param docstring: the wrapped function's ``__doc__`` (may be None).
        :return: joined description lines, or None when there is no docstring.
        """
        if not docstring:
            return None
        description_lines = []
        for line in docstring.strip().split("\n"):
            line = line.strip()
            # Stop at the first field marker; everything after is metadata.
            if line.startswith((":param", ":type", ":return")):
                break
            if line:
                description_lines.append(line)
        return "\n".join(description_lines)

    def extract_parameter_type(self, parameter_name, docstring):
        """Return the ``:type <name>:`` value from the docstring, or None."""
        if docstring:
            type_prefix = f":type {parameter_name}:"
            for line in docstring.strip().split("\n"):
                line = line.strip()
                if line.startswith(type_prefix):
                    return line.replace(type_prefix, "").strip()
        return None

    def extract_parameter_description(self, parameter_name, docstring):
        """Return the ``:param <name>:`` value from the docstring, or None."""
        if docstring:
            param_prefix = f":param {parameter_name}:"
            for line in docstring.strip().split("\n"):
                line = line.strip()
                if line.startswith(param_prefix):
                    return line.replace(param_prefix, "").strip()
        return None

    def __call__(self, *args, **kwargs):
        # Delegate calls so the wrapper is a drop-in for the plain function.
        return self.func(*args, **kwargs)

    def function(self):
        """Return the schema dict built at construction time."""
        return self.info
|
|
|
|
|
def function_info(func):
    """Decorator: wrap *func* in a FunctionWrapper so its OpenAI
    function-calling schema is available via ``.function()``."""
    wrapped = FunctionWrapper(func)
    return wrapped
|
|
|
|
|
def ai(function_name="", query=""):
    """Run a chat completion that may call a local @function_info function.

    :param function_name: name of a module-level FunctionWrapper to offer
        to the model via the ``functions`` parameter.
    :param query: the user prompt.
    :return: the final ChatCompletion response dict — the follow-up response
        when the model called the function, otherwise the first response.
    """
    wrapper = globals().get(function_name)

    # test we have the function before dereferencing it
    if wrapper is None:
        print("Couldn't find the function!")
        sys.exit()

    # Step 1: offer the function schema to the model.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=[{"role": "user", "content": query}],
        functions=[wrapper.function()],
        function_call="auto",
    )

    message = response["choices"][0]["message"]

    # Step 2: check if the model wants to call a function. If not, return
    # the first response directly (previously this fell through and
    # returned None, crashing callers that read .get('choices')).
    if not message.get("function_call"):
        return response

    called_name = message["function_call"]["name"]
    wrapper = globals().get(called_name)

    # test we have the function the model actually asked for
    if wrapper is None:
        print("Couldn't find the function!")
        sys.exit()

    # Step 3: get the schema, parse the model-supplied arguments
    # (they arrive as a JSON string and may not be valid JSON), and
    # drop any hallucinated arguments not present in the schema.
    function_info = wrapper.function()
    function_call_args = json.loads(message["function_call"]["arguments"])
    filtered_args = {
        arg: value
        for arg, value in function_call_args.items()
        if arg in function_info["parameters"]["properties"]
    }

    # Call the local function with the filtered arguments.
    function_response = wrapper(**filtered_args)

    # Step 4: send the model the function-call message and the function's
    # result so it can produce the final answer.
    second_response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=[
            {"role": "user", "content": query},
            message,
            {
                "role": "function",
                "name": called_name,
                "content": json.dumps(function_response)
            },
        ],
    )
    return second_response
|
|
|
@function_info |
|
def collatz_sequence(n: int) -> list[int]:
    """
    Generates the Collatz sequence for a given number.

    :param n: The starting number of the sequence.
    :type n: integer
    :return: list of integers of the sequence.
    :rtype: list[int]

    Example:
    >>> collatz_sequence(6)
    [6, 3, 10, 5, 16, 8, 4, 2, 1]
    """
    # Coerce model-supplied input; fall back to the demo value on bad input.
    # (Was a bare except:, which also swallowed SystemExit and friends.)
    try:
        n = int(n)
    except (TypeError, ValueError):
        n = 6

    # The loop below only terminates for positive n (0 halves to 0 forever,
    # negatives never reach 1) — apply the same fallback for those.
    if n < 1:
        n = 6

    sequence = [n]
    while n != 1:
        # Collatz step: halve even numbers, 3n+1 for odd numbers.
        n = n // 2 if n % 2 == 0 else 3 * n + 1
        sequence.append(n)

    return sequence
|
|
|
### replace this with any function and change the function name in the ai() call below |
|
## |
|
|
|
@function_info |
|
def get_top_stories(num_stories: int) -> dict[str, str]:
    """
    Fetches the top stories from Hacker News using Algolia's search API.

    :param num_stories: The number of top stories to fetch.
    :type num_stories: integer
    :return: A dictionary containing the top stories with their titles and URLs.
    :rtype: dict[str, str]
    """
    # Portable epoch seconds — strftime("%s") is undocumented and does not
    # work on all platforms (e.g. Windows).
    now = int(datetime.now().timestamp())

    base_url = "https://hn.algolia.com/api/v1/search"
    params = {
        "tags": "story",
        # Restrict results to stories created in the last 24 hours.
        "numericFilters": "created_at_i>{0},created_at_i<{1}".format(
            now - 86400,  # 24 hours ago
            now           # current timestamp
        ),
        "hitsPerPage": num_stories
    }

    # Timeout so a stalled API call can't hang the script indefinitely.
    response = requests.get(base_url, params=params, timeout=10)
    if response.status_code != 200:
        return None

    data = response.json()

    # Build {rank: {"title": ..., "url": ...}} from the hits. Use .get()
    # because "url" can be null/missing for self posts (Ask HN etc.).
    document = {}
    for index, hit in enumerate(data["hits"], start=1):
        document[index] = {"title": hit.get("title"), "url": hit.get("url")}

    return document
|
# |
|
## |
|
### end custom function |
|
|
|
# add token to openai (project-local config module supplies the key)
from config import openai_token
openai.api_key = openai_token

# user input: topic used to star matching stories in the output
query = input("What type of HackerNews stories are you looking for? ")

# run the conversation: the model is offered get_top_stories and its final
# response (one or more choices) is printed below
for choice in ai("get_top_stories", f"Looking at the top 20 HackerNews stories, show the title and a URL in a list for console output, and mark {query} related entries with a star (*). Do not put links in brackets or parentheses. \n").get('choices'):
    # print each choice's plain-text content to the console
    print(choice.get('message').get('content'))
|
|
|
|