Skip to content

Commit

Permalink
remove extra code
Browse files — browse the repository at this point in the history
Signed-off-by: Sukriti-Sharma4 <[email protected]>
  • Loading branch information
Ssukriti committed Sep 19, 2024
1 parent 4905832 commit 94f42d4
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 10 deletions.
2 changes: 1 addition & 1 deletion tuning/sft_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -669,7 +669,7 @@ def main():
checkpoint_dir = training_args.save_model_dir
if checkpoint_dir:
print(f"Post processing LoRA adapters in {checkpoint_dir}")
post_process_vLLM_adapters_new_tokens(path_to_checkpoint=checkpoint_dir)
post_process_vLLM_adapters_new_tokens(path_to_checkpoint=checkpoint_dir, num_added_tokens=num_added_tokens)
except Exception as e: # pylint: disable=broad-except
logging.error(traceback.format_exc())
write_termination_log(
Expand Down
9 changes: 0 additions & 9 deletions tuning/utils/merge_model_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,15 +126,6 @@ def post_process_vLLM_adapters_new_tokens(
if not modified_checkpoint_path:
modified_checkpoint_path = path_to_checkpoint

# Get all values of new token indexes
sorted_token_indexes = []
if os.path.isfile(os.path.join(path_to_checkpoint, "added_tokens.json")):
with open(
os.path.join(path_to_checkpoint, "added_tokens.json"), "r", encoding="utf-8"
) as fp:
added_tokens = json.load(fp)
sorted_token_indexes = sorted(added_tokens.values())

with safe_open(
os.path.join(path_to_checkpoint, "adapter_model.safetensors"), framework="pt"
) as f:
Expand Down

0 comments on commit 94f42d4

Please sign in to comment.