Llama-3-70B QLoRA config (trains on one GPU without DeepSpeed; OOMs on multiple GPUs during the LoRA merge)
base_model: meta-llama/Meta-Llama-3-70B
# deepspeed: /workspace/axolotl/deepspeed_configs/zero3_bf16.json

load_in_8bit: false
load_in_4bit: true
strict: false

datasets:
  # This will be the path used for the data when it is saved to the Volume in the cloud.
  - path: data.jsonl
    ds_type: json
    type: input_output
dataset_prepared_path: last_run_prepared
val_set_size: 0.05
output_dir: ./lora-out

sequence_len: 1024
sample_packing: false
eval_sample_packing: false
pad_to_sequence_len: false

adapter: qlora
lora_model_dir:
# lora_r: 128
lora_r: 16
lora_alpha: 16
# lora_modules_to_save: [embed_tokens, lm_head]
lora_dropout: 0.05
# lora_target_linear: true
lora_fan_in_fan_out:
lora_target_modules:
  - q_proj
  - v_proj
  # - k_proj
  # - o_proj
  # - gate_proj
  # - down_proj
  # - up_proj

wandb_project: khk-llama-3-70b
wandb_entity:
wandb_watch:
wandb_run_id:

gradient_accumulation_steps: 1
micro_batch_size: 1
num_epochs: 1
# optimizer: adamw_torch
optimizer: adamw_8bit
lr_scheduler: constant
learning_rate: 1e-5

train_on_inputs: false
group_by_length: false
bf16: auto
fp16: false
tf32: false

gradient_checkpointing: true
gradient_checkpointing_kwargs:
  use_reentrant: false
early_stopping_patience:
resume_from_checkpoint:
local_rank:
logging_steps: 1
xformers_attention:
flash_attention: true

warmup_steps: 100
# evals_per_epoch: 5
eval_steps: 1.0
eval_batch_size: 1
saves_per_epoch: 5
save_total_limit: 10
eval_table_size:
save_steps:
debug:
deepspeed:
weight_decay: 0.0
fsdp:
fsdp_config:
special_tokens:
  pad_token: <|end_of_text|>
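
Usage note: a minimal sketch of how a config like this is typically driven with the standard axolotl CLI. The filename config.yml and the CUDA_VISIBLE_DEVICES pinning are illustrative assumptions, not part of the gist; the train and merge_lora entry points and the --lora_model_dir flag are the documented axolotl invocations.

# Train (single GPU, no DeepSpeed, matching the commented-out deepspeed line above).
# config.yml is an assumed filename for this gist's YAML.
accelerate launch -m axolotl.cli.train config.yml

# Merge the QLoRA adapter back into the base model. Given the multi-GPU OOM
# mentioned in the description, one common workaround is to pin the merge to a
# single device (CUDA_VISIBLE_DEVICES=0) or force a CPU/system-RAM merge with
# CUDA_VISIBLE_DEVICES="" instead of letting it spread across GPUs.
CUDA_VISIBLE_DEVICES=0 python3 -m axolotl.cli.merge_lora config.yml --lora_model_dir="./lora-out"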