Jupyter to LLM Integration
#NoEnv  ; Recommended for performance and compatibility with future AutoHotkey releases.
SendMode Input  ; Recommended for new scripts due to its superior speed and reliability.
SetWorkingDir %A_ScriptDir%  ; Ensures a consistent starting directory.

; Typing ".." expands to %sendchat and runs the cell with Shift+Enter.
::..::
SendInput `%sendchat{shift}+{enter}
return

; Typing ".c" expands to "%sendchat Context Python: " for a prefixed question.
::.c::
SendInput `%sendchat{space} Context Python:{space}
return

; Typing ".l" expands to "%sendchat --last " for resending recent cells.
::.l::
SendInput `%sendchat --last{space}
return
This project lets you connect your Jupyter Notebook to an LLM running locally and get real-time help with errors.
SendChat lets you chat with your local LLM, send the last cell (with any errors it produced), or send the last N cells in the notebook.
1. Requirements: LM Studio, AutoHotkey, Jupyter Notebook.
2. Start the server in LM Studio.
3. Copy/paste the magic command below into your notebook. Update the port/model to match your config.
4. Load the AutoHotkey (AHK) script.
5. Type %sendchat directly, or use the AHK hotstrings (dot dot, dot c, and dot l) for auto-expansion, as shown in the example after this list.
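As a rough sketch of a session (the data.csv file and the questions are hypothetical), once the magic-command cell has been run and the LM Studio server is up:
```
# Cell 1: code that fails (data.csv is a hypothetical file)
import pandas as pd
df = pd.read_csv("data.csv")
df["missing_column"].mean()   # raises KeyError

# Cell 2: ask the local LLM about the most recent cell and its error
%sendchat --last 1

# Cell 3: or ask a free-form question
%sendchat How do I list the columns of a pandas DataFrame?
```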
Additional notes:
Llama emitted correctly formatted code without any special instructions. Gemma gave a lot of trouble with formatting.
DeepSeek V2 was able to emit some correct code that Llama failed at. Recommend starting with Llama or DeepSeek V2, then trying other models.
The following prompt, or variations of it, may be useful:
You are an assistant providing help inside a Jupyter notebook. Assume all code is Python. Be concise. All code shall be emitted in triple backtick format:
```
def hello_world():
    print("Hello, World!")

hello_world()
``` <-- important
Always emit a newline after Python. Always emit triple backticks to close code blocks.
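If you want this prompt applied on every request, one option (not part of the original script; adjust to taste) is to send it as a system message in the payload built by the magic command below:
```
SYSTEM_PROMPT = (
    "You are an assistant providing help inside a Jupyter notebook. "
    "Assume all code is Python. Be concise. "
    "All code shall be emitted in triple backtick format."
)

# Inside sendchat(), replace the single-user-message payload with:
payload = {
    "model": "lmstudio-community/Meta-Llama-3-70B-Instruct-GGUF",
    "messages": [
        {"role": "system", "content": SYSTEM_PROMPT},
        {"role": "user", "content": content},
    ],
    "temperature": 0.7,
    "max_tokens": -1,
    "stream": False,
}
```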
import requests
import json
import re
from IPython.core.magic import register_line_cell_magic
from IPython.display import display, Markdown
from IPython import get_ipython

@register_line_cell_magic
def sendchat(line, cell=None):
    """
    Magic command to interact with LM Studio model.
    Usage:
    %sendchat Your question here
    or
    %sendchat
    to send the content of the last added cell
    or
    %sendchat --last [n]
    to send the content of the last n added cells
    """
    url = "http://localhost:9000/v1/chat/completions"
    headers = {"Content-Type": "application/json"}

    try:
        if line.strip().startswith("--last"):
            match = re.match(r"--last(?:\s+(\d+))?", line.strip())
            n = int(match.group(1) or 1)
            content = get_previous_cells_content(n)
        elif not line.strip() and not cell:
            # This case handles %sendchat with no input, same as --last 1
            content = get_previous_cells_content(1)
        else:
            content = cell if cell is not None else line

        if not content.strip():
            raise ValueError("No content to send. The last cell might be empty or contain only whitespace.")

        print("Sending the following content to LM Studio:")
        print(abbreviate_content(content))

        payload = {
            "model": "lmstudio-community/Meta-Llama-3-70B-Instruct-GGUF",
            "messages": [{"role": "user", "content": content}],
            "temperature": 0.7,
            "max_tokens": -1,
            "stream": False
        }

        response = requests.post(url, headers=headers, data=json.dumps(payload))
        response.raise_for_status()
        result = response.json()
        model_reply = result['choices'][0]['message']['content']
        display(Markdown(f"**LM Studio:** {model_reply}"))

    except ValueError as e:
        print(f"Error: {str(e)}")
    except requests.RequestException as e:
        print(f"Error communicating with LM Studio: {str(e)}")
        if e.response is not None:
            print(f"Response content: {e.response.text}")
    except Exception as e:
        print(f"An unexpected error occurred: {str(e)}")


def get_previous_cells_content(n):
    ip = get_ipython()
    cells = ip.history_manager.get_tail(n=100, include_latest=True)
    if not cells:
        raise ValueError("No recent cells found")

    # Filter out the magic command cells
    relevant_cells = [cell for cell in cells if not cell[2].strip().startswith("%sendchat")]
    if len(relevant_cells) < n:
        raise ValueError(f"Not enough cells available. Requested {n}, but only found {len(relevant_cells)}")

    selected_cells = relevant_cells[-n:]
    content = []
    for session, line, cell_content in reversed(selected_cells):  # Reverse to get most recent first
        content.append(f"Cell content:\n{cell_content}")

        # Attempt to get the error output for this cell (note: this re-executes the cell)
        error_output = None
        try:
            error_output = ip.run_cell(cell_content).error_in_exec
        except Exception as e:
            error_output = str(e)
        if error_output:
            content.append(f"Error output:\n{error_output}")

    return "\n\n".join(content)


def abbreviate_content(content, max_lines=10, max_line_length=80):
    lines = content.split('\n')
    if len(lines) > max_lines:
        abbreviated = '\n'.join(lines[:max_lines]) + f"\n... ({len(lines) - max_lines} more lines)"
    else:
        abbreviated = '\n'.join(lines)
    if len(abbreviated) > max_line_length * max_lines:
        abbreviated = abbreviated[:max_line_length * max_lines] + "..."
    return abbreviated


# Inform the user that the magic command is ready
print("SendChat magic command (%sendchat) is now available.")
print("Use %sendchat to send the last cell, %sendchat --last n to send the last n cells, or %sendchat Your text here to send custom text.")