From 2404c98c9f65bf94b962855e90277acbf6cfca6f Mon Sep 17 00:00:00 2001
From: pytorchbot
Date: Thu, 31 Oct 2024 19:22:23 +0000
Subject: [PATCH] auto-generating sphinx docs

---
 main/_modules/torchtune/modules/peft/dora.html | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/main/_modules/torchtune/modules/peft/dora.html b/main/_modules/torchtune/modules/peft/dora.html
index 7ca9869ee3..a06ffd8e00 100644
--- a/main/_modules/torchtune/modules/peft/dora.html
+++ b/main/_modules/torchtune/modules/peft/dora.html
@@ -508,7 +508,8 @@
         _lora_a_init_params(self.lora_a)
         _lora_b_init_params(self.lora_b)
 
-[docs]    def initialize_dora_magnitude(self):
+[docs]    @torch.no_grad()
+    def initialize_dora_magnitude(self):
         """
         DoRA initializes the magnitude vector such that its outputs are
         initially identical to standard LoRA's outputs.
@@ -516,7 +517,7 @@
         base_weight = self.weight.to(self.lora_a.weight.dtype)
         lora_weight = self.lora_b.weight @ self.lora_a.weight
         weight_norm = self._get_weight_norm(base_weight, lora_weight)
-        self.magnitude = nn.Parameter(weight_norm, requires_grad=True)
+        self.magnitude.copy_(weight_norm)
 
     def _create_weight_and_bias(self):
         """
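
A note on the change itself: the patch decorates initialize_dora_magnitude with @torch.no_grad() and writes the computed weight norm into the existing magnitude parameter via an in-place copy_, instead of rebinding self.magnitude to a freshly constructed nn.Parameter. The two pieces go together: calling copy_ on a leaf tensor that requires grad raises a RuntimeError unless autograd is disabled, and mutating the parameter in place keeps the original object alive for anything that already holds a reference to it (an optimizer, a state dict, a distributed wrapper). Below is a minimal self-contained sketch of this pattern; DoRALinearSketch, its dimensions, and the scaling convention are illustrative assumptions, not torchtune's actual DoRALinear.

import torch
import torch.nn as nn

class DoRALinearSketch(nn.Module):
    """Illustrative stand-in for a DoRA linear layer (not torchtune's DoRALinear)."""

    def __init__(self, in_dim: int, out_dim: int, rank: int, alpha: float = 1.0):
        super().__init__()
        self.scaling = alpha / rank
        # Frozen base weight, as in LoRA-style fine-tuning.
        self.weight = nn.Parameter(torch.randn(out_dim, in_dim), requires_grad=False)
        self.lora_a = nn.Linear(in_dim, rank, bias=False)
        self.lora_b = nn.Linear(rank, out_dim, bias=False)
        nn.init.zeros_(self.lora_b.weight)  # LoRA delta starts at zero
        # Registered once up front; initialization below must not rebind it.
        self.magnitude = nn.Parameter(torch.empty(out_dim))

    def _get_weight_norm(self, base_weight, lora_weight):
        # Per-output-channel L2 norm of the merged weight matrix.
        merged = base_weight + self.scaling * lora_weight
        return torch.linalg.norm(merged, dim=1)

    @torch.no_grad()  # without this, copy_ into a grad-requiring leaf errors out
    def initialize_dora_magnitude(self):
        base_weight = self.weight.to(self.lora_a.weight.dtype)
        lora_weight = self.lora_b.weight @ self.lora_a.weight
        # In-place copy preserves the Parameter object, matching the patched code.
        self.magnitude.copy_(self._get_weight_norm(base_weight, lora_weight))

The payoff is visible when an optimizer is built before magnitude initialization, a common ordering in fine-tuning recipes:

layer = DoRALinearSketch(in_dim=16, out_dim=32, rank=4)
opt = torch.optim.AdamW([layer.magnitude], lr=1e-4)
layer.initialize_dora_magnitude()  # opt still tracks the same tensor object

Under the replaced line, the nn.Parameter reassignment would instead have left opt updating a parameter the module no longer uses.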