From d0e1da8200b83fea020d9fdea827fc6a8fecb585 Mon Sep 17 00:00:00 2001 From: Javier Luraschi Date: Mon, 5 Aug 2024 21:06:55 -0700 Subject: [PATCH] add support for tools --- CHANGELOG.md | 4 + README.md | 2 +- javascript/README.md | 2 +- python/hal9/__init__.py | 1 + python/hal9/complete.py | 91 +++++++++++++++++++ python/hal9/iobind.py | 11 ++- python/messages.json | 1 + python/messages.pkl | 1 + python/pyproject.toml | 2 +- .../tests/{test_extract.py => test_code.py} | 0 python/tests/test_iobind.py | 5 + website/learn/genapps/llmapps/tools.md | 27 ++++++ website/reference/complete.md | 22 +++++ website/src/pages/index.md | 2 +- 14 files changed, 163 insertions(+), 8 deletions(-) create mode 100644 python/hal9/complete.py create mode 100644 python/messages.json create mode 100644 python/messages.pkl rename python/tests/{test_extract.py => test_code.py} (100%) create mode 100644 python/tests/test_iobind.py create mode 100644 website/learn/genapps/llmapps/tools.md create mode 100644 website/reference/complete.md diff --git a/CHANGELOG.md b/CHANGELOG.md index d18226bf..81cf7666 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog +## 2.6.1 + +- Add `complete()` and `describe()` to support handling tools + ## 2.5.7 - `save()` supports creating subfolders diff --git a/README.md b/README.md index bf0917dc..69efe0db 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ [![GitHub star chart](https://img.shields.io/github/stars/hal9ai/hal9?style=flat-square)](https://star-history.com/#hal9ai/hal9) Create and deploy generative ([LLMs](https://github.com/Hannibal046/Awesome-LLM) and [diffusers](https://github.com/huggingface/diffusers)) applications (chatbots and APIs) in seconds. 
-- **Open:** Use any model ([OpenAI](https://platform.openai.com/docs/api-reference/introduction), [Llama](https://ai.meta.com/blog/5-steps-to-getting-started-with-llama-2/), [Groq](https://docs.api.groq.com/md/tutorials/python.groqapi.html), [MidJourney](https://docs.imagineapi.dev/en)) and any library like ([LangChainl](https://python.langchain.com/v0.1/docs/get_started/quickstart/), [DSPy](https://dspy-docs.vercel.app/docs/quick-start/installation)). +- **Open:** Use any model ([OpenAI](https://platform.openai.com/docs/api-reference/introduction), [Llama](https://ai.meta.com/blog/5-steps-to-getting-started-with-llama-2/), [Groq](https://docs.api.groq.com/md/tutorials/python.groqapi.html), [MidJourney](https://docs.imagineapi.dev/en)) and any library like ([LangChain](https://python.langchain.com/v0.1/docs/get_started/quickstart/), [DSPy](https://dspy-docs.vercel.app/docs/quick-start/installation)). - **Intuitive:** No need to learn app frameworks ([Flask](https://flask.palletsprojects.com/en/3.0.x/quickstart/)), simply use `input()` and `print()`, or write file to disk. - **Scalable:** Engineers can integrate your app with scalable technologies ([Docker](https://www.docker.com/), [Kubernetes](https://kubernetes.io/), etc) - **Powerful:** Using an OS process (stdin, stdout, files) as our app contract, enables long-running agents, multiple programming languages, and complex system dependencies. diff --git a/javascript/README.md b/javascript/README.md index 8c572697..b4d2f713 100644 --- a/javascript/README.md +++ b/javascript/README.md @@ -6,7 +6,7 @@ [![GitHub star chart](https://img.shields.io/github/stars/hal9ai/hal9?style=flat-square)](https://star-history.com/#hal9ai/hal9) Create and deploy generative ([LLMs](https://github.com/Hannibal046/Awesome-LLM) and [diffusers](https://github.com/huggingface/diffusers)) applications (chatbots and APIs) in seconds. 
-- **Open:** Use any model ([OpenAI](https://platform.openai.com/docs/api-reference/introduction), [Llama](https://ai.meta.com/blog/5-steps-to-getting-started-with-llama-2/), [Groq](https://docs.api.groq.com/md/tutorials/python.groqapi.html), [MidJourney](https://docs.imagineapi.dev/en)) and any library like ([LangChainl](https://python.langchain.com/v0.1/docs/get_started/quickstart/), [DSPy](https://dspy-docs.vercel.app/docs/quick-start/installation)). +- **Open:** Use any model ([OpenAI](https://platform.openai.com/docs/api-reference/introduction), [Llama](https://ai.meta.com/blog/5-steps-to-getting-started-with-llama-2/), [Groq](https://docs.api.groq.com/md/tutorials/python.groqapi.html), [MidJourney](https://docs.imagineapi.dev/en)) and any library like ([LangChain](https://python.langchain.com/v0.1/docs/get_started/quickstart/), [DSPy](https://dspy-docs.vercel.app/docs/quick-start/installation)). - **Intuitive:** No need to learn app frameworks ([Flask](https://flask.palletsprojects.com/en/3.0.x/quickstart/)), simply use `input()` and `print()`, or write file to disk. - **Scalable:** Engineers can integrate your app with scalable technologies ([Docker](https://www.docker.com/), [Kubernetes](https://kubernetes.io/), etc) - **Powerful:** Using an OS process (stdin, stdout, files) as our app contract, enables long-running agents, multiple programming languages, and complex system dependencies. 
diff --git a/python/hal9/__init__.py b/python/hal9/__init__.py index 50c4c41e..4c9f8dd0 100644 --- a/python/hal9/__init__.py +++ b/python/hal9/__init__.py @@ -5,3 +5,4 @@ from hal9.code import extract from hal9.urls import is_url, url_contents from hal9.events import event +from hal9.complete import complete, describe diff --git a/python/hal9/complete.py b/python/hal9/complete.py new file mode 100644 index 00000000..b359b801 --- /dev/null +++ b/python/hal9/complete.py @@ -0,0 +1,91 @@ +import inspect +import json + +type_mapping = { + int: "integer", + str: "string", + float: "number", + bool: "boolean", + list: "array", + dict: "object" +} + +def describe_single(func): + """ + Takes a function and returns its metadata as a JSON string in the specified format. + """ + signature = inspect.signature(func) + params = signature.parameters + + # Collecting function metadata + func_name = func.__name__ + func_doc = inspect.getdoc(func) or "" + + properties = {} + for name, param in params.items(): + param_type = param.annotation + if param_type in type_mapping: + json_type = type_mapping[param_type] + else: + json_type = "string" # default to string if type is not mapped + properties[name] = {"type": json_type} + + result = { + "name": func_name, + "description": func_doc, + "parameters": { + "type": "object", + "properties": properties, + "required": list(properties.keys()) + } + } + + return result + +def describe(functions): + return [describe_single(func) for func in functions] + +def complete(completion, messages = [], tools = [], show = True): + tools = {func.__name__: func for func in tools} + content = result= "" + tool_name = tool_text = "" + tool_args = None + + if not 'stream' in str(type(completion)).lower(): + content = completion.choices[0].message.content + if completion.choices[0].message.function_call != None: + tool_name = completion.choices[0].message.function_call.name + tool_args = json.loads(completion.choices[0].message.function_call.arguments) + if show: + 
print(content) + else: + for chunk in completion: + if chunk.choices and len(chunk.choices) > 0 and chunk.choices[0].delta: + if chunk.choices[0].delta.content: + if show: + print(chunk.choices[0].delta.content, end="") + content += chunk.choices[0].delta.content + if chunk.choices[0].delta.function_call != None: + tool_text += chunk.choices[0].delta.function_call.arguments + if chunk.choices[0].delta.function_call.name: + tool_name = chunk.choices[0].delta.function_call.name + try: + tool_args = json.loads(tool_text) + except Exception as e: + pass + if show: + print() + + if len(content) > 0: + messages.append({ "role": "assistant", "content": content}) + + if tool_args: + if tool_name in tools: + try: + result = str(tools[tool_name](**tool_args)) + except Exception as e: + result = str(e) + print(result) + messages.append({ "role": "function", "name": tool_name, "content": result}) + + return content + result \ No newline at end of file diff --git a/python/hal9/iobind.py b/python/hal9/iobind.py index 5563c0ea..08db6289 100644 --- a/python/hal9/iobind.py +++ b/python/hal9/iobind.py @@ -99,9 +99,12 @@ def save(name, contents = None, hidden = False, files = None): }, indent=2) Path(name + '.asset').write_text(asset_definition) -def input(prompt = "", extract = False): +def input(prompt = "", extract = False, messages = []): print(prompt, end="") - text = sys.stdin.read() + prompt = sys.stdin.read() + if extract: - text = url_contents(text) - return text + prompt = url_contents(text) + + messages.append({"role": "user", "content": prompt}) + return prompt diff --git a/python/messages.json b/python/messages.json new file mode 100644 index 00000000..3cc762b5 --- /dev/null +++ b/python/messages.json @@ -0,0 +1 @@ +"" \ No newline at end of file diff --git a/python/messages.pkl b/python/messages.pkl new file mode 100644 index 00000000..9b6ff7ac --- /dev/null +++ b/python/messages.pkl @@ -0,0 +1 @@ +€N. 
\ No newline at end of file diff --git a/python/pyproject.toml b/python/pyproject.toml index 012dc299..0a00aec6 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "hal9" -version = "2.6.0" +version = "2.6.1" description = "" authors = ["Javier Luraschi "] readme = "README.md" diff --git a/python/tests/test_extract.py b/python/tests/test_code.py similarity index 100% rename from python/tests/test_extract.py rename to python/tests/test_code.py diff --git a/python/tests/test_iobind.py b/python/tests/test_iobind.py new file mode 100644 index 00000000..30f1b414 --- /dev/null +++ b/python/tests/test_iobind.py @@ -0,0 +1,5 @@ +import hal9 as h9 + +def test_save_empty_error(): + h9.save("messages", None) + assert True diff --git a/website/learn/genapps/llmapps/tools.md b/website/learn/genapps/llmapps/tools.md new file mode 100644 index 00000000..08042517 --- /dev/null +++ b/website/learn/genapps/llmapps/tools.md @@ -0,0 +1,27 @@ +# Tools + +This section presents how to add tools to your LLM application. + +```python +import os +from openai import OpenAI +import hal9 as h9 + +def multiply(a: int, b: int) -> int: + """Multiply two numbers.""" + return a * b + +messages = h9.load("messages", []) +prompt = h9.input(messages = messages) + +completion = OpenAI().chat.completions.create( + model = "gpt-4", + messages = messages, + functions = h9.describe([multiply]), + function_call = "auto", + stream = True +) + +h9.complete(completion, messages = messages, tools = [multiply]) +h9.save("messages", messages, hidden = True) +``` diff --git a/website/reference/complete.md b/website/reference/complete.md new file mode 100644 index 00000000..695d3e5c --- /dev/null +++ b/website/reference/complete.md @@ -0,0 +1,22 @@ +# Complete + +Convenience functions to handle LLM completions + +## Complete +`complete(completion, messages, tools, show)`

+Finishes completing the completions by printing them, appending messages, or handling tools. + +| Param | Type | Description | +| --- | --- | --- | +| completion | String | The completions from the LLM. | +| messages | Array | Messages to append replies to, defaults to `[]`. | +| tools | Array | An array of functions to use as tools, defaults to `[]`. | +| show | Bool | Print the completions? Defaults to `True`. | + +## Describe +`describe(functions)`

+Describes an array of functions with descriptions, parameters and types. Useful when completing chats. + +| Param | Type | Description | +| --- | --- | --- | +| functions | Array | An array of functions to describe. | diff --git a/website/src/pages/index.md b/website/src/pages/index.md index f401e6a2..ad83feb7 100644 --- a/website/src/pages/index.md +++ b/website/src/pages/index.md @@ -9,7 +9,7 @@ import { Floating } from '../components/floating.jsx'; Create and deploy generative (LLMs and diffusers) applications (chatbots and APIs) in seconds. Focus on AI (RAG, fine-tuning, alignment, training) and skip engineering tasks (frontend development, backend integration, deployment, operations).
- Open to any model (OpenAI, Llama, Groq, Midjourney) and any library like (LangChainl, DSPy). + Open to any model (OpenAI, Llama, Groq, Midjourney) and any library like (LangChain, DSPy). No need to learn app frameworks (flask), intuitively use `input()` and `print()`, or write file to disk. Engineers can integrate your app with scalable technologies (Docker, Kubernetes, etc). Powerful architecture for agents, multiple programming languages, and complex dependencies.