generate_urd.py
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate


def generate_user_requirement_document(openai_api_key, answers):
    """Generate a User Requirement Document (URD) from the user's answers using an LLM chain."""
    # System prompt: defines the structure and formatting rules for the URD.
    system_message = """Given the information provided by the user, generate a User Requirement Document (URD). The URD should \
have a clear problem statement, goals, objectives to accomplish, features, and a conclusion paragraph. Reproduce the \
features part as it is in the response.
Generate the features response in the following format:
Feature Name:
Feature Prerequisites:
Feature Description:
Sub-feature Name:
Sub-feature Prerequisites:
Sub-feature Description:
Generate the response document in Markdown format.
The title must be generated from the topic name.
"""
    # Human prompt: template slots filled from the user's answers.
    human_message = """
Problem: {answer1}
Platform: {platform}
Goal: {answer2}
Features: {answer3}
Users: {answer4}
"""
    system_message_prompt = SystemMessagePromptTemplate.from_template(system_message)
    human_message_prompt = HumanMessagePromptTemplate.from_template(human_message)
    chat_prompt = ChatPromptTemplate.from_messages(
        [
            system_message_prompt,
            human_message_prompt,
        ]
    )
    # 16k-context GPT-3.5 model with moderate creativity for document generation.
    chat_model = ChatOpenAI(openai_api_key=openai_api_key, temperature=0.6, model="gpt-3.5-turbo-16k")
    llm_chain = LLMChain(prompt=chat_prompt, llm=chat_model, verbose=True)
    response = llm_chain.run(
        answer1=answers["answer1"],
        platform=answers["platform"],
        answer2=answers["answer2"],
        answer3=answers["answer3"],
        answer4=answers["answer4"],
    )
    return response
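

# Example usage (a minimal sketch, not part of the original script): the keys in
# sample_answers match those read by generate_user_requirement_document above;
# the API key and answer values are placeholders for illustration only.
if __name__ == "__main__":
    sample_answers = {
        "answer1": "Teams struggle to turn scattered requirement notes into a structured document.",
        "platform": "Web",
        "answer2": "Automatically produce a well-structured URD from short user answers.",
        "answer3": "Feature Name: URD generation\nFeature Prerequisites: OpenAI API key\nFeature Description: Generates a Markdown URD from user input.",
        "answer4": "Product managers and business analysts",
    }
    urd = generate_user_requirement_document(
        openai_api_key="YOUR_OPENAI_API_KEY",  # placeholder key
        answers=sample_answers,
    )
    print(urd)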