RamaLlama Extension
A Newelle extension that registers RamaLama's locally served, OpenAI-compatible API as an LLM handler.
from .extensions import NewelleExtension
from .handlers.llm import OpenAIHandler
from .handlers import ExtraSettings


class Ramallama(NewelleExtension):
    name = "Ramallama"
    id = "ramallama"

    def __init__(self, pip_path: str, extension_path: str, settings):
        super().__init__(pip_path, extension_path, settings)

    def get_llm_handlers(self) -> list[dict]:
        # Register RamaLama as a selectable LLM provider in Newelle.
        return [
            {
                "key": "ramallama",
                "title": "RamaLlama",
                "description": "The RamaLama tool facilitates local management and serving of AI Models.",
                "class": RamaLlamaHandler
            }
        ]


class RamaLlamaHandler(OpenAIHandler):
    # RamaLama serves models behind an OpenAI-compatible API, so the handler
    # reuses OpenAIHandler and only overrides the endpoint and model settings.
    key = "ramallama"

    def __init__(self, settings, path):
        super().__init__(settings, path)

    def get_extra_settings(self) -> list:
        settings = [
            ExtraSettings.EntrySetting("endpoint", _("API Endpoint"), _("API base url, change this to use interference APIs"), "http://localhost:8080/v1"),
            ExtraSettings.EntrySetting("model", _("Model to use"), _("Name of the model to use"), "llama3.1:8b"),
        ]
        settings += self.build_extra_settings("RamaLlama", False, True, False, True, False, None, None, False, False)
        return settings
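For reference, a minimal sketch of what this handler ultimately talks to: assuming a RamaLama server is already running locally and exposing its OpenAI-compatible API at the default endpoint configured above, any OpenAI-style client can query it directly. The openai package, the placeholder API key, and the example prompt below are assumptions for illustration, not part of the extension itself.

# Minimal sketch: query a locally served model through the same
# OpenAI-compatible endpoint the handler defaults to. Assumes the
# openai package is installed and a server is listening on
# http://localhost:8080/v1 with the model below available.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8080/v1", api_key="not-needed")

response = client.chat.completions.create(
    model="llama3.1:8b",  # default model name used by the handler above
    messages=[{"role": "user", "content": "Hello from Newelle!"}],
)
print(response.choices[0].message.content)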