Skip to content

Commit

Permalink
Fixed template and prompt generation
Browse files Browse the repository at this point in the history
  • Loading branch information
kannangce committed Nov 1, 2023
1 parent 3717ee6 commit e127485
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions generate/lora_ui_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,12 @@
# LoRA adapter placement flags: apply LoRA only to attention projections,
# not the MLP or the LM head.
lora_mlp = False
lora_head = False

# Inference-time prompt template (Jinja2). Unlike the training template, it
# deliberately omits the BOS/EOS markers and leaves the "## Response:" section
# empty so the model generates the JSON UI-DSL as a completion.
# NOTE(review): the tags here are [INS]/[/INS]; the Llama-2 chat convention is
# [INST]/[/INST]. If the LoRA was fine-tuned with [INS], keep it — the prompt
# must match training exactly. Confirm against the training template.
TEMPLATE = Template('''
<<SYS>>The response MUST be a valid JSON. Generate UI-DSL for the below input and context.<</SYS>>
{%- if context -%}## Input: {{ context }}{%- endif -%}
[INS]## Instruction: {{ prompt }}[/INS]
## Response: 
''')

def main(
prompt: str = "What food do lamas eat?",
Expand Down Expand Up @@ -153,7 +153,7 @@ def main(
t = time.perf_counter() - t0

output = tokenizer.decode(y)
output = output.split("### Response:")[1].strip()
# output = output.split("### Response:")[1].strip()
fabric.print(output)

tokens_generated = y.size(0) - prompt_length
Expand All @@ -164,7 +164,7 @@ def main(
def generate_prompt(example: dict) -> str:
    """Render the inference-time prompt for one example.

    Args:
        example: Mapping with a required ``'instruction'`` key (the user
            prompt) and an optional ``'input'`` key providing extra context
            (defaults to an empty string when absent).

    Returns:
        The prompt string rendered from ``TEMPLATE``. The response section is
        intentionally left blank so the model fills it in at generation time.

    Raises:
        KeyError: If ``'instruction'`` is missing from ``example``.
    """
    return TEMPLATE.render(prompt=example['instruction'], context=example.get('input', ''))

if __name__ == "__main__":
from jsonargparse import CLI
Expand Down

0 comments on commit e127485

Please sign in to comment.