Skip to content

Commit

Permalink
update test names
Browse files (browse the repository at this point in the history)
  • Loading branch information
Felipe Mello committed Dec 3, 2024
1 parent 839e342 commit 40419fd
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 8 deletions.
8 changes: 4 additions & 4 deletions tests/recipes/test_knowledge_distillation_distributed.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def test_loss(self, tmpdir, monkeypatch):

cmd = f"""
tune run --nnodes 1 --nproc_per_node 2 knowledge_distillation_distributed \
--config llama3_2/knowledge_distillation_distributed \
--config llama3_2/8B_to_1B_KD_distributed \
output_dir={tmpdir} \
checkpointer._component_=torchtune.training.FullModelTorchTuneCheckpointer \
checkpointer.checkpoint_dir='{ckpt_dir}' \
Expand Down Expand Up @@ -120,7 +120,7 @@ def test_training_state_on_resume(self, tmpdir, monkeypatch):
# Train for two epochs
cmd_1 = f"""
tune run --nnodes 1 --nproc_per_node 2 knowledge_distillation_distributed \
--config llama3_2/knowledge_distillation_distributed \
--config llama3_2/8B_to_1B_KD_distributed \
output_dir={tmpdir} \
checkpointer=torchtune.training.FullModelTorchTuneCheckpointer \
checkpointer.checkpoint_dir='{ckpt_dir}' \
Expand Down Expand Up @@ -148,7 +148,7 @@ def test_training_state_on_resume(self, tmpdir, monkeypatch):
# Resume training
cmd_2 = f"""
tune run --nnodes 1 --nproc_per_node 2 knowledge_distillation_distributed \
--config llama3_2/knowledge_distillation_distributed \
--config llama3_2/8B_to_1B_KD_distributed \
output_dir={tmpdir} \
checkpointer=torchtune.training.FullModelTorchTuneCheckpointer \
checkpointer.checkpoint_dir={tmpdir} \
Expand Down Expand Up @@ -199,7 +199,7 @@ def test_save_and_load_merged_weights(self, tmpdir, monkeypatch):

cmd = f"""
tune run --nnodes 1 --nproc_per_node 2 knowledge_distillation_distributed \
--config llama3_2/knowledge_distillation_distributed \
--config llama3_2/8B_to_1B_KD_distributed \
output_dir={tmpdir} \
checkpointer._component_={ckpt_component} \
checkpointer.checkpoint_dir='{ckpt_dir}' \
Expand Down
8 changes: 4 additions & 4 deletions tests/recipes/test_knowledge_distillation_single_device.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def test_loss(
tmpdir,
monkeypatch,
):
config = "qwen2/knowledge_distillation_single_device"
config = "qwen2/1.5_to_0.5B_KD_single_device"
model_type = "llama3"
ckpt_type = "tune"
ckpt_component = CKPT_COMPONENT_MAP[ckpt_type]
Expand Down Expand Up @@ -152,7 +152,7 @@ def test_training_state_on_resume(self, tmpdir, monkeypatch):
# Train for two epochs
cmd_1 = f"""
tune run knowledge_distillation_single_device \
--config qwen2/knowledge_distillation_single_device \
--config qwen2/1.5_to_0.5B_KD_single_device \
output_dir={tmpdir} \
checkpointer=torchtune.training.FullModelTorchTuneCheckpointer \
checkpointer.checkpoint_dir='{ckpt_dir}' \
Expand Down Expand Up @@ -186,7 +186,7 @@ def test_training_state_on_resume(self, tmpdir, monkeypatch):
# Resume training
cmd_2 = f"""
tune run knowledge_distillation_single_device \
--config qwen2/knowledge_distillation_single_device \
--config qwen2/1.5_to_0.5B_KD_single_device \
output_dir={tmpdir} \
checkpointer=torchtune.training.FullModelTorchTuneCheckpointer \
checkpointer.checkpoint_dir={tmpdir} \
Expand Down Expand Up @@ -242,7 +242,7 @@ def test_save_and_load_merged_weights(self, tmpdir, monkeypatch):

cmd = f"""
tune run knowledge_distillation_single_device \
--config qwen2/knowledge_distillation_single_device \
--config qwen2/1.5_to_0.5B_KD_single_device \
output_dir={tmpdir} \
checkpointer._component_={ckpt_component} \
checkpointer.checkpoint_dir='{ckpt_dir}' \
Expand Down

0 comments on commit 40419fd

Please sign in to comment.