Last active
July 24, 2024 09:49
-
-
Save alonsosilvaallende/907cbc31bc0e2f9c46a8e4371fcd6a87 to your computer and use it in GitHub Desktop.
Llama-3_1_Espanol
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "de67e678-6d98-4fb3-92d5-c5a9c8a57c2a",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-07-24T09:48:24.972128Z",
     "iopub.status.busy": "2024-07-24T09:48:24.971378Z",
     "iopub.status.idle": "2024-07-24T09:48:24.983673Z",
     "shell.execute_reply": "2024-07-24T09:48:24.981538Z",
     "shell.execute_reply.started": "2024-07-24T09:48:24.972073Z"
    }
   },
   "outputs": [],
   "source": [
    "# Correr sólo una vez para descargar el modelo\n",
    "# !wget https://huggingface.co/MaziyarPanahi/Meta-Llama-3.1-8B-Instruct-GGUF/resolve/main/Meta-Llama-3.1-8B-Instruct.Q4_K_M.gguf"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "2d2a2e77-537d-49f4-bec7-b98be4f61b73",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-07-24T09:48:28.086624Z",
     "iopub.status.busy": "2024-07-24T09:48:28.086117Z",
     "iopub.status.idle": "2024-07-24T09:48:28.092247Z",
     "shell.execute_reply": "2024-07-24T09:48:28.091041Z",
     "shell.execute_reply.started": "2024-07-24T09:48:28.086573Z"
    }
   },
   "outputs": [],
   "source": [
    "# Instalar llama-cpp-python\n",
    "# !python -m pip install --upgrade llama-cpp-python"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "ea277ad4-7fd4-4b94-b548-8c27f2a5c2e8",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-07-24T09:48:44.779123Z",
     "iopub.status.busy": "2024-07-24T09:48:44.778569Z",
     "iopub.status.idle": "2024-07-24T09:48:46.346221Z",
     "shell.execute_reply": "2024-07-24T09:48:46.345285Z",
     "shell.execute_reply.started": "2024-07-24T09:48:44.779069Z"
    }
   },
   "outputs": [],
   "source": [
    "from llama_cpp import Llama\n",
    "\n",
    "llm = Llama(\n",
    "    \"/big_storage/llms/Meta-Llama-3.1-8B-Instruct.Q4_K_M.gguf\",  # remplazar con /directorio/del/modelo\n",
    "    n_gpu_layers=-1,\n",
    "    flash_attn=True,\n",
    "    n_ctx=8192,\n",
    "    verbose=False,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "b6f9ef05-ea10-45b6-9740-5280cb267db2",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-07-24T09:49:04.670931Z",
     "iopub.status.busy": "2024-07-24T09:49:04.670162Z",
     "iopub.status.idle": "2024-07-24T09:49:09.407702Z",
     "shell.execute_reply": "2024-07-24T09:49:09.406966Z",
     "shell.execute_reply.started": "2024-07-24T09:49:04.670871Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/markdown": [
       "Sí, puedes utilizar la siguiente expresión regular en Python:\n",
       "\n",
       "```python\n",
       "import re\n",
       "\n",
       "cadena = \"Este es un ejemplo de <tool_call> y otro texto\"\n",
       "patron = r'<tool_call>'\n",
       "resultado = re.search(patron, cadena)\n",
       "\n",
       "if resultado:\n",
       "    print(\"Se encontró el patrón\")\n",
       "else:\n",
       "    print(\"No se encontró el patrón\")\n",
       "```\n",
       "\n",
       "En este código, `re.search()` busca la primera ocurrencia del patrón en la cadena. Si lo encuentra, imprime \"Se encontró el patrón\", si no lo encuentra, imprime \"No se encontró el patrón\".\n",
       "\n",
       "Si deseas encontrar todas las ocurrencias del patrón en la cadena, puedes utilizar `re.findall()`:\n",
       "\n",
       "```python\n",
       "import re\n",
       "\n",
       "cadena = \"Este es un ejemplo de <tool_call> y otro texto con <tool_call>\"\n",
       "patron = r'<tool_call>'\n",
       "resultados = re.findall(patron, cadena)\n",
       "\n",
       "if resultados:\n",
       "    print(\"Se encontraron los siguientes patrones:\")\n",
       "    for resultado in resultados:\n",
       "        print(resultado)\n",
       "else:\n",
       "    print(\"No se encontró el patrón\")\n",
       "```\n",
       "\n",
       "En este caso, `re.findall()` devuelve una lista con todas las ocurrencias del patrón en la cadena."
      ],
      "text/plain": [
       "<IPython.core.display.Markdown object>"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from IPython.display import Markdown\n",
    "\n",
    "response = llm.create_chat_completion(\n",
    "    messages=[\n",
    "        {\"role\": \"system\", \"content\": \"Eres un asistente útil.\"},\n",
    "        {\n",
    "            \"role\": \"user\",\n",
    "            \"content\": \"Puedes darme el código regex en Python para identificar el string '<tool_call>'.\",\n",
    "        },\n",
    "    ]\n",
    ")\n",
    "Markdown(response[\"choices\"][0][\"message\"][\"content\"])"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment