diff --git a/13_sandboxes/codelangchain/agent.py b/13_sandboxes/codelangchain/agent.py
index 0caa0128c..cd8da9eb2 100644
--- a/13_sandboxes/codelangchain/agent.py
+++ b/13_sandboxes/codelangchain/agent.py
@@ -18,8 +18,9 @@
 # ## Setup
 
 import modal
-from src import edges, nodes, retrieval
-from src.common import COLOR, PYTHON_VERSION, image
+
+from .src import edges, nodes, retrieval
+from .src.common import COLOR, PYTHON_VERSION, image
 
 # You will need two [Modal Secrets](https://modal.com/docs/guide/secrets) to run this example:
 # one to access the OpenAI API and another to access the LangSmith API for logging the agent's behavior.
@@ -43,7 +44,7 @@
 # library to generate text with a pre-trained model. Let's create a Sandbox with the necessary dependencies.
 
 
-def create_sandbox() -> modal.Sandbox:
+def create_sandbox(app) -> modal.Sandbox:
     # Change this image (and the retrieval logic in the retrieval module)
     # if you want the agent to give coding advice on other libraries!
     agent_image = modal.Image.debian_slim(
@@ -101,7 +102,8 @@ def run(code: str, sb: modal.Sandbox) -> tuple[str, str]:
 
 def construct_graph(sandbox: modal.Sandbox, debug: bool = False):
     from langgraph.graph import StateGraph
-    from src.common import GraphState
+
+    from .src.common import GraphState
 
     # Crawl the transformers documentation to inform our code generation
     context = retrieval.retrieve_docs(debug=debug)
@@ -137,7 +139,7 @@ def go(
     debug: bool = False,
 ):
     """Compiles the Python code generation agent graph and runs it, returning the result."""
-    sb = create_sandbox()
+    sb = create_sandbox(app)
     graph = construct_graph(sb, debug=debug)
     runnable = graph.compile()
 
diff --git a/13_sandboxes/codelangchain/codelangchain.py b/13_sandboxes/codelangchain/codelangchain.py
deleted file mode 100644
index c19dfbdb7..000000000
--- a/13_sandboxes/codelangchain/codelangchain.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# ---
-# pytest: false
-# cmd: ["modal", "serve", "13_sandboxes.codelangchain.codelangchain"]
-# ---
-"""Application serving logic for the CodeLangChain agent."""
-
-import agent
-import modal
-from agent import app, create_sandbox
-from fastapi import FastAPI, responses
-from fastapi.middleware.cors import CORSMiddleware
-
-# create a FastAPI app
-web_app = FastAPI(
-    title="CodeLangChain Server",
-    version="1.0",
-    description="Answers questions about Python programming.",
-)
-
-
-# set all CORS enabled origins
-web_app.add_middleware(
-    CORSMiddleware,
-    allow_origins=["*"],
-    allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
-    expose_headers=["*"],
-)
-
-
-# host it on Modal
-@app.function(keep_warm=1)
-@modal.asgi_app()
-def serve():
-    from langchain_core.runnables import RunnableLambda
-    from langserve import add_routes
-
-    def inp(question: str) -> dict:
-        return {"keys": {"question": question, "iterations": 0}}
-
-    def out(state: dict) -> str:
-        if "finish" in state:
-            return state["finish"]["keys"]["response"]
-        elif len(state) > 0 and "finish" in state[-1]:
-            return state[-1]["finish"]["keys"]["response"]
-        else:
-            return str(state)
-
-    graph = agent.construct_graph(create_sandbox(), debug=False).compile()
-
-    chain = RunnableLambda(inp) | graph | RunnableLambda(out)
-
-    add_routes(
-        web_app,
-        chain,
-        path="/codelangchain",
-    )
-
-    # redirect the root to the interactive playground
-    @web_app.get("/")
-    def redirect():
-        return responses.RedirectResponse(url="/codelangchain/playground")
-
-    return web_app
diff --git a/13_sandboxes/codelangchain/langserve.py b/13_sandboxes/codelangchain/langserve.py
new file mode 100644
index 000000000..dcbe0cff8
--- /dev/null
+++ b/13_sandboxes/codelangchain/langserve.py
@@ -0,0 +1,86 @@
+# ---
+# pytest: false
+# cmd: ["modal", "serve", "13_sandboxes.codelangchain.langserve"]
+# ---
+
+# # Deploy LangChain and LangGraph applications with LangServe
+
+# This code demonstrates how to deploy a
+# [LangServe](https://python.langchain.com/docs/langserve/) application on Modal.
+# LangServe makes it easy to wrap LangChain and LangGraph applications in a FastAPI server,
+# and Modal makes it easy to deploy FastAPI servers.
+
+# The LangGraph application that it serves is from our [sandboxed LLM coding agent example](https://modal.com/docs/examples/agent).
+
+# You can find the code for the agent and several other code files associated with this example in the
+# [`codelangchain` directory of our examples repo](https://github.com/modal-labs/modal-examples/tree/main/13_sandboxes/codelangchain).
+
+import modal
+
+from .agent import construct_graph, create_sandbox
+from .src.common import image
+
+app = modal.App("example-langserve")
+
+image = image.pip_install("langserve[all]==0.3.0")
+
+
+@app.function(
+    image=image,
+    secrets=[  # see the agent.py file for more information on Secrets
+        modal.Secret.from_name("openai-secret"),
+        modal.Secret.from_name("my-langsmith-secret"),
+    ],
+)
+@modal.asgi_app()
+def serve():
+    from fastapi import FastAPI, responses
+    from fastapi.middleware.cors import CORSMiddleware
+    from langchain_core.runnables import RunnableLambda
+    from langserve import add_routes
+
+    # create a FastAPI app
+    web_app = FastAPI(
+        title="CodeLangChain Server",
+        version="1.0",
+        description="Writes code and checks if it runs.",
+    )
+
+    # set all CORS enabled origins
+    web_app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+        expose_headers=["*"],
+    )
+
+    def inp(question: str) -> dict:
+        return {"keys": {"question": question, "iterations": 0}}
+
+    def out(state: dict) -> str:
+        if "finish" in state:
+            return state["finish"]["keys"]["response"]
+        elif len(state) > 0 and "finish" in state[-1]:
+            return state[-1]["finish"]["keys"]["response"]
+        else:
+            return str(state)
+
+    graph = construct_graph(create_sandbox(app), debug=False).compile()
+
+    chain = RunnableLambda(inp) | graph | RunnableLambda(out)
+
+    add_routes(
+        web_app,
+        chain,
+        path="/codelangchain",
+    )
+
+    # redirect the root to the interactive playground
+    @web_app.get("/")
+    def redirect():
+        return responses.RedirectResponse(url="/codelangchain/playground")
+
+    # return the FastAPI app and Modal will deploy it for us
+    return web_app
diff --git a/13_sandboxes/codelangchain/src/common.py b/13_sandboxes/codelangchain/src/common.py
index 50c9bea19..3d9038a62 100644
--- a/13_sandboxes/codelangchain/src/common.py
+++ b/13_sandboxes/codelangchain/src/common.py
@@ -14,7 +14,6 @@
     "langgraph==0.2.39",
     "langchain-community==0.3.3",
     "langchain-openai==0.2.3",
-    "langserve[all]==0.3.0",
 )
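
The patch above is the complete change. As a usage illustration only (not part of the patch), below is a minimal client sketch for the routes that `add_routes` registers in the new `langserve.py`. It assumes the app is running under `modal serve` or has been deployed with `modal deploy`; the base URL is a placeholder for whatever URL Modal prints for the `serve` function, and the question string is made up.

# Hypothetical client for the LangServe routes defined above (illustration only).
import requests

# Placeholder URL: substitute the one Modal prints when serving/deploying `serve`.
BASE_URL = "https://<your-workspace>--example-langserve-serve.modal.run"

# LangServe's `add_routes` exposes an /invoke endpoint under the configured path;
# the chain above accepts a plain question string as its input.
resp = requests.post(
    f"{BASE_URL}/codelangchain/invoke",
    json={"input": "How do I generate text from a prompt with transformers?"},
    timeout=600,  # code generation plus sandboxed execution can take a while
)
resp.raise_for_status()

# /invoke wraps the chain's return value (the string built by `out`) under "output".
print(resp.json()["output"])

The interactive playground that the root redirect points to is served at `/codelangchain/playground` on the same host.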