@ebsmothers
Created September 30, 2024 19:58
Traceback (most recent call last):
  File "/home/ebs/.conda/envs/nightly-testing-09-25/bin/tune", line 8, in <module>
    sys.exit(main())
             ^^^^^^
  File "/data/users/ebs/ebs-torchtune/torchtune/_cli/tune.py", line 49, in main
    parser.run(args)
  File "/data/users/ebs/ebs-torchtune/torchtune/_cli/tune.py", line 43, in run
    args.func(args)
  File "/data/users/ebs/ebs-torchtune/torchtune/_cli/run.py", line 185, in _run_cmd
    self._run_single_device(args)
  File "/data/users/ebs/ebs-torchtune/torchtune/_cli/run.py", line 94, in _run_single_device
    runpy.run_path(str(args.recipe), run_name="__main__")
  File "<frozen runpy>", line 291, in run_path
  File "<frozen runpy>", line 98, in _run_module_code
  File "<frozen runpy>", line 88, in _run_code
  File "/data/users/ebs/ebs-torchtune/recipes/lora_finetune_single_device.py", line 793, in <module>
    sys.exit(recipe_main())
             ^^^^^^^^^^^^^
  File "/data/users/ebs/ebs-torchtune/torchtune/config/_parse.py", line 99, in wrapper
    sys.exit(recipe_main(conf))
             ^^^^^^^^^^^^^^^^^
  File "/data/users/ebs/ebs-torchtune/recipes/lora_finetune_single_device.py", line 788, in recipe_main
    recipe.train()
  File "/data/users/ebs/ebs-torchtune/recipes/lora_finetune_single_device.py", line 765, in train
    self.save_checkpoint(epoch=curr_epoch)
  File "/data/users/ebs/ebs-torchtune/recipes/lora_finetune_single_device.py", line 620, in save_checkpoint
    self._checkpointer.save_checkpoint(
  File "/data/users/ebs/ebs-torchtune/torchtune/training/checkpointing/_checkpointer.py", line 800, in save_checkpoint
    state_dict[training.MODEL_KEY] = llama3_vision_tune_to_meta(
                                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/data/users/ebs/ebs-torchtune/torchtune/models/llama3_2_vision/_convert_weights.py", line 152, in llama3_vision_tune_to_meta
    max(_layer_num(k) for k in state_dict if "cross_attention_layers" in k) + 1
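The traceback is cut off before the exception message. A plausible cause, assuming the state dict passed to llama3_vision_tune_to_meta contains no keys with "cross_attention_layers" in them, is that the max() call above receives an empty generator and raises ValueError. A minimal sketch of that failure mode (the keys and the _layer_num implementation here are illustrative, not copied from torchtune):

# Sketch of the suspected failure: max() over an empty generator raises ValueError.
# Assumption: the checkpoint has no "cross_attention_layers" keys, e.g. a text-only
# set of weights being routed through the vision converter.
state_dict = {"layers.0.attn.q_proj.weight": None}  # hypothetical key layout

def _layer_num(key: str) -> int:
    # Illustrative helper: pull the layer index out of a key like "layers.3.attn...".
    return int(key.split(".")[1])

try:
    num_cross_attn_layers = (
        max(_layer_num(k) for k in state_dict if "cross_attention_layers" in k) + 1
    )
except ValueError as e:
    # Python's max() raises ValueError when given an empty iterable and no default.
    print(e)

If that is indeed the issue, guarding the max() with a default (or checking for cross-attention keys before converting) would avoid the crash.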