
Commit 8ad46f9

small test updates
ebsmothers committed Dec 10, 2024
1 parent 7dcabf5 commit 8ad46f9
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions tests/torchtune/modules/peft/test_dora.py
@@ -269,7 +269,7 @@ def embed_dim(self):
     def test_dora_distributed_init(self):
         self.run_subtests(
             {
-                "load_dora_weights": [True],
+                "load_dora_weights": [True, False],
             },
             self._test_dora_distributed_init,
         )
@@ -327,8 +327,7 @@ def _test_dora_distributed_init(self, load_dora_weights):
             assert dora_linear.magnitude.is_meta

         # Optionally load adapter weights (as though we are resuming from checkpoint)
-        # Now lora_a, lora_b, and magnitude should not be on meta device, but base weight should be
-        # Additionally since the weights are randomly initialized we should have magnitude != ||W+(alpha/rank)BA||
+        # Now lora_a, lora_b, and magnitude should not be on meta device, but base weight should be.
         if load_dora_weights:
             training.load_from_full_model_state_dict(
                 ffn,
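For context, the dropped comment line refers to DoRA's magnitude decomposition: at fresh initialization the per-row magnitude equals ||W + (alpha/rank)BA||, whereas a magnitude loaded from a checkpoint was computed against different adapter weights and generally does not. Below is a minimal sketch of that relationship in plain PyTorch; the shapes, initializations, and dim=1 row-wise norm convention are illustrative assumptions, not torchtune's actual implementation.

    import torch

    # Illustrative shapes and scaling; any values would do.
    rank, alpha = 4, 8
    in_dim, out_dim = 16, 32

    base_weight = torch.randn(out_dim, in_dim)  # frozen base weight W
    lora_a = torch.randn(rank, in_dim)          # adapter matrix A
    lora_b = torch.randn(out_dim, rank)         # adapter matrix B

    # Per-row L2 norm of the adapted weight, i.e. ||W + (alpha/rank) B A||
    adapted = base_weight + (alpha / rank) * (lora_b @ lora_a)
    weight_norm = torch.linalg.norm(adapted, dim=1)

    # Fresh DoRA init sets the magnitude to this norm, so the equality
    # holds by construction and the adapted layer reproduces W exactly.
    magnitude = weight_norm.clone()
    assert torch.allclose(magnitude, weight_norm)

    # A magnitude restored from a checkpoint was computed against earlier
    # adapter weights, so in general it no longer equals weight_norm; that
    # mismatch is what the load_dora_weights=True subtest exercises.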
