This is a TypeScript environment setup guide for LLMs and humans.
Always set up the baseline tooling (a minimal sketch follows the list below):
- pnpm
- typescript
- vitest
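
A minimal sketch of that baseline, assuming a fresh repository bootstrapped with pnpm; the install commands in the comments and the config file below are illustrative, not a layout mandated by this guide:

```ts
// vitest.config.ts — assumed baseline, e.g. after:
//   pnpm init
//   pnpm add -D typescript vitest
//   pnpm exec tsc --init
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    // Collect any *.test.ts file in the repository.
    include: ["**/*.test.ts"],
  },
});
```

With this in place, `pnpm exec vitest run` executes the suite once, and `pnpm exec vitest` starts watch mode.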
## Overall Objective:
Process the provided meeting audio recording to produce a full, diarised transcript and a comprehensive, structured summary. The output should be optimised for readability and quick comprehension of key meeting outcomes.
## Input:
## Known Participants:
You are an AI Pair Programmer. Your primary purpose is to assist with coding tasks by following these operational guidelines. Strive for clarity, safety, and maintainability in all your suggestions and actions. You are a collaborative partner.
import { McpAgent } from "agents/mcp";
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { z } from "zod";
import { GoogleGenerativeAI } from "@google/generative-ai";
import { Readability } from '@mozilla/readability';
import { parseHTML } from 'linkedom';

// Worker bindings: the Durable Object namespace for the MCP agent and the Gemini API key.
type Env = {
  MyMCP: DurableObjectNamespace<MyMCP>;
  GEMINI_API_KEY: string;
};
""" | |
This script processes conversation data from a JSON file, extracts messages, | |
and writes them to text files. It also creates a summary JSON file with a summary | |
of the conversations. The script is designed to be run as a command-line interface (CLI), | |
allowing the user to specify the input JSON file and output directory. | |
Usage: | |
python script_name.py /path/to/conversations.json /path/to/output_directory | |
""" |
const fs = require('fs');
const https = require('https');
const { execSync } = require('child_process');
const model = 'gemini-1.5-pro-latest';

// List the files tracked by git under basePath, returning an array of paths (empty on failure).
function getGitTrackedFiles(basePath) {
  const command = `git ls-files ${basePath}`;
  try {
    const stdout = execSync(command, { encoding: 'utf8' });
    return stdout.split('\n').filter(Boolean);
  } catch (error) {
    console.error(`git ls-files failed: ${error.message}`);
    return [];
  }
}
# In current Sphinx, figure/table numbers (including the numbers referenced via
# numbered_reference) are reset at the subsection level; rewrite them so they
# run consecutively within each chapter (document).
def transform_fignumbers(app, doctree, docname) -> None:
    fignumbers = app.env.toc_fignumbers
    for docname in fignumbers.keys():
        for figtype in fignumbers[docname].keys():
            cnt = 1
            for fig in fignumbers[docname][figtype]:
                # Keep the leading chapter component and replace the rest with a per-chapter running counter.
                fignumbers[docname][figtype][fig] = fignumbers[docname][figtype][fig][:1] + (cnt,)
                cnt += 1
# Clone llama.cpp
git clone https://github.com/ggerganov/llama.cpp.git
cd llama.cpp

# Build it
make clean
LLAMA_METAL=1 make

# Download model
export MODEL=llama-2-13b-chat.ggmlv3.q4_0.bin
import json

USERNAME = "oquno"

def get_ordered_nodes(mapping, current_node):
    # Assumed traversal: depth-first, the current node first, then each child subtree in order.
    node_data = mapping[current_node]
    children = node_data['children']
    ordered_nodes = [current_node]
    for child in children:
        ordered_nodes.extend(get_ordered_nodes(mapping, child))
    return ordered_nodes