Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Create requirements.txt and streamlit webapp for demo #4

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 31 additions & 19 deletions model/api_selector.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
from langchain.prompts.base import BasePromptTemplate
from langchain.prompts.prompt import PromptTemplate
from langchain.llms.base import BaseLLM
import openai
from retry import retry

from utils import ReducedOpenAPISpec, get_matched_endpoint

Expand Down Expand Up @@ -106,7 +108,6 @@
API calling 1: {agent_scratchpad}"""



class APISelector(Chain):
llm: BaseLLM
api_spec: ReducedOpenAPISpec
Expand All @@ -115,14 +116,17 @@ class APISelector(Chain):
output_key: str = "result"

def __init__(self, llm: BaseLLM, scenario: str, api_spec: ReducedOpenAPISpec) -> None:
    """Build the API-selector prompt for *scenario* and initialise the Chain.

    Args:
        llm: language model used to pick the next API call.
        scenario: key into ``icl_examples`` (e.g. "tmdb" or "spotify").
        api_spec: reduced OpenAPI spec whose endpoints are offered to the LLM.
    """
    # One "<endpoint name> <first sentence of its description>" line per
    # endpoint; a missing description contributes an empty string instead
    # of raising on .split().
    api_name_desc = [
        f"{endpoint[0]} {endpoint[1].split('.')[0] if endpoint[1] is not None else ''}"
        for endpoint in api_spec.endpoints
    ]
    api_name_desc = '\n'.join(api_name_desc)
    api_selector_prompt = PromptTemplate(
        template=API_SELECTOR_PROMPT,
        partial_variables={"endpoints": api_name_desc,
                           "icl_examples": icl_examples[scenario]},
        input_variables=["plan", "background", "agent_scratchpad"],
    )
    super().__init__(llm=llm, api_spec=api_spec, scenario=scenario,
                     api_selector_prompt=api_selector_prompt)

@property
def _chain_type(self) -> str:
Expand All @@ -131,11 +135,11 @@ def _chain_type(self) -> str:
@property
def input_keys(self) -> List[str]:
    """Names of the inputs this chain expects from its caller."""
    keys = ["plan", "background"]
    return keys

@property
def output_keys(self) -> List[str]:
    """Single output slot, named by ``self.output_key``."""
    return [self.output_key]

@property
def observation_prefix(self) -> str:
"""Prefix to append the observation with."""
Expand All @@ -145,14 +149,14 @@ def observation_prefix(self) -> str:
def llm_prefix(self) -> str:
    """Template prefixed to each LLM call; the placeholder takes the call index."""
    return "API calling {}: "

@property
def _stop(self) -> List[str]:
    """Stop sequences that end generation once an observation line begins."""
    # Compute the stripped observation prefix once instead of twice.
    marker = self.observation_prefix.rstrip()
    return ["\n" + marker, "\n\t" + marker]

def _construct_scratchpad(
self, history: List[Tuple[str, str]], instruction: str
) -> str:
Expand All @@ -166,30 +170,38 @@ def _construct_scratchpad(
scratchpad += self.observation_prefix + execution_res + "\n"
scratchpad += "Instruction: " + instruction + "\n"
return scratchpad


@retry(exceptions=openai.error.RateLimitError, tries=3, delay=15, backoff=2)
def _call(self, inputs: Dict[str, Any]) -> Dict[str, str]:
    """Select the next API call for the current plan.

    inputs: ``background``, ``plan``, and optionally ``history`` +
    ``instruction`` (used to rebuild the agent scratchpad).
    Returns a dict with the selected API plan under ``"result"``.
    Retried on OpenAI rate limits (3 tries, 15s delay, x2 backoff).
    """
    if 'history' in inputs:
        scratchpad = self._construct_scratchpad(
            inputs['history'], inputs['instruction'])
    else:
        scratchpad = ""
    api_selector_chain = LLMChain(llm=self.llm, prompt=self.api_selector_prompt)
    api_selector_chain_output = api_selector_chain.run(
        plan=inputs['plan'], background=inputs['background'],
        agent_scratchpad=scratchpad, stop=self._stop)

    # Strip the "API calling N: " prefix the prompt asks the model to emit.
    api_plan = re.sub(r"API calling \d+: ", "", api_selector_chain_output).strip()

    logger.info(f"API Selector: {api_plan}")

    # NOTE(review): the '.' after "needed" is an unescaped regex wildcard;
    # in practice it matches the literal period the prompt specifies.
    finish = re.match(r"No API call needed.(.*)", api_plan)
    if finish is not None:
        return {"result": api_plan}

    # Re-prompt until the chosen endpoint exists in the spec.
    # NOTE(review): no retry cap — a model that keeps hallucinating
    # endpoints loops forever; consider bounding the attempts.
    while get_matched_endpoint(self.api_spec, api_plan) is None:
        logger.info("API Selector: The API you called is not in the list of available APIs. Please use another API.")
        scratchpad += api_selector_chain_output + "\nThe API you called is not in the list of available APIs. Please use another API.\n"
        api_selector_chain_output = api_selector_chain.run(
            plan=inputs['plan'], background=inputs['background'],
            agent_scratchpad=scratchpad, stop=self._stop)
        api_plan = re.sub(r"API calling \d+: ", "", api_selector_chain_output).strip()
        logger.info(f"API Selector: {api_plan}")

    return {"result": api_plan}
3 changes: 3 additions & 0 deletions model/planner.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
from langchain.chains.llm import LLMChain
from langchain.prompts.prompt import PromptTemplate
from langchain.llms.base import BaseLLM
import openai
from retry import retry

icl_examples = {
"tmdb": """Example 1:
Expand Down Expand Up @@ -134,6 +136,7 @@ def _construct_scratchpad(
scratchpad += self.observation_prefix + execution_res + "\n"
return scratchpad

@retry(exceptions=openai.error.RateLimitError, tries=3, delay=15, backoff=2)
def _call(self, inputs: Dict[str, str]) -> Dict[str, str]:
scratchpad = self._construct_scratchpad(inputs['history'])
# print("Scrachpad: \n", scratchpad)
Expand Down
38 changes: 38 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
aiohttp==3.8.5
aiosignal==1.3.1
annotated-types==0.5.0
async-timeout==4.0.3
attrs==23.1.0
certifi==2023.7.22
charset-normalizer==3.2.0
colorama==0.4.6
dataclasses-json==0.5.14
frozenlist==1.4.0
greenlet==2.0.2
idna==3.4
langchain==0.0.292
langsmith==0.0.37
marshmallow==3.20.1
multidict==6.0.4
mypy-extensions==1.0.0
numexpr==2.8.6
numpy==1.26.0
openai==0.28.0
packaging==23.1
pydantic==2.3.0
pydantic_core==2.6.3
PyYAML==6.0.1
redis==5.0.0
regex==2023.8.8
requests==2.31.0
six==1.16.0
spotipy==2.23.0
SQLAlchemy==2.0.20
tenacity==8.2.3
tiktoken==0.5.1
tqdm==4.66.1
typing-inspect==0.9.0
typing_extensions==4.7.1
urllib3==2.0.4
yarl==1.9.2
retry==0.9.2
streamlit==1.26.0
22 changes: 17 additions & 5 deletions run.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,26 @@

logger = logging.getLogger()

import streamlit as st

def main():
config = yaml.load(open('config.yaml', 'r'), Loader=yaml.FullLoader)
os.environ["OPENAI_API_KEY"] = config['openai_api_key']
os.environ["TMDB_ACCESS_TOKEN"] = config['tmdb_access_token']
os.environ['SPOTIPY_CLIENT_ID'] = config['spotipy_client_id']
os.environ['SPOTIPY_CLIENT_SECRET'] = config['spotipy_client_secret']
os.environ['SPOTIPY_REDIRECT_URI'] = config['spotipy_redirect_uri']

# Resolve credentials from Streamlit secrets first, then config.yaml.
def get_secret_or_config(key):
    """Return *key* from st.secrets, falling back to config.yaml.

    Raises ValueError when the key is in neither source.
    """
    if key in st.secrets:
        return st.secrets[key]
    elif key in config:
        return config[key]
    else:
        raise ValueError(f"Key '{key}' not found in secrets or config")

# Export credentials for the libraries that read them from the environment.
os.environ["OPENAI_API_KEY"] = get_secret_or_config('openai_api_key')
# BUG FIX: the lookup key was 'TMDB_ACCESS_TOKEN' (uppercase), but
# config.yaml stores it as lowercase 'tmdb_access_token' (as read by the
# original code and by streamlit_app.py), so the config fallback could
# never match and raised ValueError.
os.environ["TMDB_ACCESS_TOKEN"] = get_secret_or_config('tmdb_access_token')
os.environ['SPOTIPY_CLIENT_ID'] = get_secret_or_config('spotipy_client_id')
os.environ['SPOTIPY_CLIENT_SECRET'] = get_secret_or_config('spotipy_client_secret')
os.environ['SPOTIPY_REDIRECT_URI'] = get_secret_or_config('spotipy_redirect_uri')

logging.basicConfig(
format="%(message)s",
Expand Down
123 changes: 123 additions & 0 deletions streamlit_app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
import os
import json
import logging
import time
import yaml

import spotipy
from langchain.requests import Requests
from langchain import OpenAI

from utils import reduce_openapi_spec, ColorPrint
from model import RestGPT

logger = logging.getLogger()

import streamlit as st

# Page header for the demo app.
st.title("RestGPT")
st.subheader("An LLM-based autonomous agent controlling real-world applications via RESTful APIs")

# Raw CSS injected below: hides Streamlit's default menu/footer and replaces
# the footer with an attribution line. This is runtime markup — keep verbatim.
hide_streamlit_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
footer:after {
content:"Made with 💓 by Chandra Sekhar Mullu";
visibility: visible;
display: block;
position: relative;
#background-color: red;
padding: 5px;
top: 2px;
}
</style>
"""
# unsafe_allow_html is required for the <style> tag to take effect.
st.markdown(hide_streamlit_style, unsafe_allow_html=True)

def main():
    """Streamlit entry point: resolve credentials, build a RestGPT agent for
    the chosen scenario (TMDB or Spotify), and run the user's query.
    """
    # FIX: use a context manager (original leaked the file handle) and
    # safe_load (FullLoader can construct arbitrary Python objects; a plain
    # credentials file never needs that).
    with open('config.yaml', 'r') as f:
        config = yaml.safe_load(f)

    def get_secret_or_config(key):
        """Prefer st.secrets; fall back to a non-empty config.yaml entry."""
        if key in st.secrets:
            return st.secrets[key]
        elif key in config and config[key] != '':
            return config[key]
        else:
            raise ValueError(f"Key '{key}' not found in secrets or config")

    # Export credentials for the libraries that read them from the environment.
    os.environ["OPENAI_API_KEY"] = get_secret_or_config('openai_api_key')
    os.environ["TMDB_ACCESS_TOKEN"] = get_secret_or_config('tmdb_access_token')
    os.environ['SPOTIPY_CLIENT_ID'] = get_secret_or_config('spotipy_client_id')
    os.environ['SPOTIPY_CLIENT_SECRET'] = get_secret_or_config('spotipy_client_secret')
    os.environ['SPOTIPY_REDIRECT_URI'] = get_secret_or_config('spotipy_redirect_uri')

    logging.basicConfig(
        format="%(message)s",
        handlers=[logging.StreamHandler(ColorPrint())],
    )
    logger.setLevel(logging.INFO)

    scenario = st.selectbox(
        'Which API you want to play with',
        ('TMDB', 'Spotify'))
    scenario = scenario.lower()

    if scenario == 'tmdb':
        with open("specs/tmdb_oas.json") as f:
            raw_tmdb_api_spec = json.load(f)
        api_spec = reduce_openapi_spec(raw_tmdb_api_spec, only_required=False)
        access_token = os.environ["TMDB_ACCESS_TOKEN"]
        headers = {
            'Authorization': f'Bearer {access_token}'
        }
    elif scenario == 'spotify':
        with open("specs/spotify_oas.json") as f:
            raw_api_spec = json.load(f)
        api_spec = reduce_openapi_spec(raw_api_spec, only_required=False, merge_allof=True)
        # Request every scope the OAS declares so any endpoint is callable.
        scopes = list(raw_api_spec['components']['securitySchemes']['oauth_2_0']['flows']['authorizationCode']['scopes'].keys())
        access_token = spotipy.util.prompt_for_user_token(scope=','.join(scopes))
        headers = {
            'Authorization': f'Bearer {access_token}'
        }
    else:
        raise ValueError(f"Unsupported scenario: {scenario}")

    requests_wrapper = Requests(headers=headers)

    llm = OpenAI(model_name="text-davinci-003", temperature=0.0, max_tokens=-1)
    rest_gpt = RestGPT(llm, api_spec=api_spec, scenario=scenario,
                       requests_wrapper=requests_wrapper, simple_parser=False)

    if scenario == 'tmdb':
        # ChatGPT's training data ends Sep 2021, so a 2022 query forces API use.
        query_example = 'What is the most popular movie directed by SS Rajamouli in 2022'
    elif scenario == 'spotify':
        query_example = "Add Summertime Sadness by Lana Del Rey in my first playlist"
    st.write(f"Example instruction: {query_example}")
    query = st.text_input('Query', query_example)
    # FIX: grammar — original read "beyond the that time period".
    st.info("Since ChatGPT is trained with data till sep 2021 we ask any query beyond that time period")

    # Placeholder swaps the Run button for a status spinner while the agent
    # runs. The original routed this through a redundant button_clicked flag;
    # the button's return value already says whether it was pressed.
    status_placeholder = st.empty()

    if status_placeholder.button("Run", type="primary"):
        with status_placeholder.status("Running"):
            logger.info(f"Query: {query}")
            start_time = time.time()
            output = rest_gpt.run(query)
            st.success(output)
            logger.info(f"Execution Time: {time.time() - start_time}")
# Script entry point (launched via `streamlit run streamlit_app.py`).
if __name__ == '__main__':
    main()