#pragma once

#include <coroutine>
#include <utility>
#include <functional>

// Coroutine return object for the AI state machine; the nested promise_type is
// what the C++ coroutine machinery uses to create and drive the coroutine.
struct NecroAiState;

struct NecroAiState
{
    struct promise_type;
// Two identical 6x9 rectangle meshes; the handles can be cloned cheaply and reused.
let cube_mesh_handle: Handle<Mesh> = meshes.add(Rectangle::new(6., 9.));
let cube_mesh_handle2: Handle<Mesh> = meshes.add(Rectangle::new(6., 9.));

// Spawn an entity at the origin; ParentMarker is a user-defined tag component.
commands.spawn((
    SpatialBundle {
        transform: Transform::default(),
        ..default()
    },
    ParentMarker,
    // On::<Pointer<Drag>>::listener_component_mut::<Transform>(|drag, transform| {
use std::io::Write;
use tokenizers::Tokenizer;
use candle_core::quantized::gguf_file;
use candle_core::Device;
use candle_transformers::generation::{LogitsProcessor, Sampling};
use candle_transformers::models::quantized_llama as model;
use model::ModelWeights;
use burnt_wick::streamable_model::StreamableModel;

// Load a GGUF-quantized Llama checkpoint and its tokenizer.
fn load_model_and_tokenizer(
use std::io::Write;
use tokenizers::Tokenizer;
use candle_core::quantized::gguf_file;
use candle_core::Tensor;
use candle_core::Device;
use candle_transformers::generation::{LogitsProcessor, Sampling};
use candle_transformers::models::quantized_llama as model;
use candle_examples::token_output_stream::TokenOutputStream;
use model::ModelWeights;
use std::collections::HashMap;
import os, sys

# Make the project root importable before the local `generation` package is loaded.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import time
from collections import deque

import torch
from flask import Flask, render_template
from flask_socketio import SocketIO, emit

from generation.make_instruct import get_generator_func
from generation.exllama_generator_wrapper import encode_message, encode_system, encode_header, encode_header_prefilled, encode_message_with_eot, encode_completion
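# Hypothetical sketch, not part of the original file: one plausible way to keep a
# rolling chat history with the pieces imported above. The signatures of
# encode_system and encode_header are assumptions inferred from their call sites
# in these snippets; encode_message(tokenizer, role, text) matches its use below.
history = deque(maxlen=20)  # (role, content) pairs for the most recent turns

def build_prompt(tokenizer, user_text, system_text):
    # Encoded system prompt first, then prior turns, then the new user turn,
    # ending with an assistant header so generation continues from there.
    prompt = encode_system(tokenizer, system_text)
    for role, content in history:
        prompt.extend(encode_message(tokenizer, role, content))
    prompt.extend(encode_message(tokenizer, "user", user_text))
    prompt.extend(encode_header(tokenizer, "assistant"))
    return prompt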
# Build one training prompt per (text, instruction, previous summary) triple.
# Note: the loop variable `text` shadows the `text` iterable being zipped over.
for text, inst, summary in zip(text, instructions, previous_summary):
    if summary != "":
        # Append a randomly chosen continuation phrase plus the prior summary to the instruction.
        summarized_augmentation = random.choice(augmented_continuation)
        inst = f"{inst}\n\n{summarized_augmentation} {summary}"
    # Start from a copy of the encoded system prompt and append the user turn.
    next_prompt = copy.deepcopy(enc_sys_prompt)
    next_message = encode_message(tokenizer, "user", inst)
    next_prompt.extend(next_message)
    # Length of everything before the assistant turn, presumably used to mask the prompt tokens.
    mask_length = len(next_prompt)
    next_prompt.extend(encode_message_english_sentence_truncate(tokenizer, "assistant", text, mask_length, 8150))
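# Hypothetical stand-in, not the original helper: encode_message_english_sentence_truncate
# is assumed to encode an assistant turn while dropping trailing sentences so that the full
# sequence stays under a token budget (8150 above). A naive version might look like this:
def truncate_assistant_by_sentence(tokenizer, role, text, prefix_len, budget):
    # Crude sentence split; the real helper presumably does something smarter.
    sentences = text.split(". ")
    kept = sentences[:]
    while kept:
        candidate = encode_message(tokenizer, role, ". ".join(kept))
        if prefix_len + len(candidate) <= budget:
            return candidate
        kept.pop()  # drop the last sentence and retry
    return encode_message(tokenizer, role, "")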
import os, sys

import torch
from flask import Flask, render_template
from flask_socketio import SocketIO, emit

from generation.make_instruct import get_generator_func
from generation.exllama_generator_wrapper import encode_message, encode_system, encode_header

app = Flask(__name__)
socketio = SocketIO(app)

# System prompt used for every conversation served by this app.
system_prompt = "Respond to all inputs with EEE"
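# Hypothetical sketch, not part of the original file: a minimal Socket.IO handler that
# builds a prompt from the system prompt above and streams generated text back to the
# client. `generate` and `tokenizer` stand in for objects the real app would create via
# get_generator_func and the exllama wrapper; their interfaces are assumptions here.
generate = get_generator_func()  # assumed: returns a callable that streams text for an encoded prompt
tokenizer = None                 # assumed: the real app constructs a tokenizer for the encode_* helpers

@socketio.on("user_message")
def handle_user_message(data):
    # System prompt, then the user's message, then an assistant header to generate after.
    prompt = encode_system(tokenizer, system_prompt)
    prompt.extend(encode_message(tokenizer, "user", data["text"]))
    prompt.extend(encode_header(tokenizer, "assistant"))
    for piece in generate(prompt):
        emit("assistant_token", {"text": piece})

if __name__ == "__main__":
    socketio.run(app)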
from transformers import AutoTokenizer
import json
import sys

# Local path to the Llama 3 8B checkpoint whose tokenizer is used for counting.
model = "/home/blackroot/Desktop/llama3-8b/llama-3-8b"
max_tokens = 8192

def count_tokens_hf(text: str, model_name: str) -> int:
    # Load the tokenizer for the given model and return the token count of `text`.
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    encoded_input = tokenizer.encode(text)
    return len(encoded_input)
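# Hypothetical usage, not from the original file: read a text file given on the
# command line and report whether it fits within max_tokens for this model.
if __name__ == "__main__":
    with open(sys.argv[1], "r", encoding="utf-8") as f:
        contents = f.read()
    n = count_tokens_hf(contents, model)
    print(json.dumps({"tokens": n, "fits": n <= max_tokens}))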