diff --git a/apps/hal9/app.py b/apps/hal9/app.py
index c3d926bf..1088e587 100644
--- a/apps/hal9/app.py
+++ b/apps/hal9/app.py
@@ -1,19 +1,21 @@
 from utils import generate_response, load_messages, insert_message, execute_function, save_messages, insert_tool_message, is_url, download_file, generate_text_embeddings_parquet
 from tools.calculator import solve_math_problem_description, solve_math_problem
-from tools.generic import answer_generic_question_description, answer_generic_question
+from tools.generic import answer_generic_wrapper_description, answer_generic_wrapper, answer_generic_question_submit
+from tools.generic import answer_generic_question, answer_generic_question_description
 from tools.csv_agent import analyze_csv_description, analyze_csv
 from tools.image_agent import images_management_system, images_management_system_description, add_images_descriptions
 from tools.hal9 import answer_hal9_questions_description, answer_hal9_questions
 from tools.text_agent import analyze_text_file_description, analyze_text_file
 from tools.streamlit import streamlit_generator, streamlit_generator_description
 from tools.website import website_generator, website_generator_description
+from concurrent.futures import ThreadPoolExecutor, Future
 
 # load messages
 messages = load_messages()
 
 # load tools
-tools_descriptions = [solve_math_problem_description, answer_generic_question_description, analyze_csv_description, images_management_system_description, answer_hal9_questions_description, analyze_text_file_description, streamlit_generator_description, website_generator_description]
-tools_functions = [solve_math_problem, answer_generic_question, analyze_csv, images_management_system, answer_hal9_questions, analyze_text_file, streamlit_generator, website_generator]
+tools_descriptions = [answer_generic_wrapper_description, solve_math_problem_description, analyze_csv_description, images_management_system_description, answer_hal9_questions_description, analyze_text_file_description, streamlit_generator_description, website_generator_description]
+tools_functions = [answer_generic_wrapper, solve_math_problem, analyze_csv, images_management_system, answer_hal9_questions, analyze_text_file, streamlit_generator, website_generator]
 
 if len(messages) < 1:
     messages = insert_message(messages, "system", "You are Hal9, a helpful and highly capable AI assistant. Your primary responsibility is to analyze user questions and select the most appropriate tool to provide precise, relevant, and actionable responses. Always prioritize using the right tool to ensure efficiency and clarity in your answers.")
@@ -32,12 +34,16 @@
     print(f"I'm ready to answer questions about your file: {filename}")
 else:
     user_input = user_input.replace("\f", "\n")
 
-    messages = insert_message(messages, "user", user_input)
-    response = generate_response("openai", "gpt-4-turbo", messages, tools_descriptions, tool_choice = "required", parallel_tool_calls=False)
+    with ThreadPoolExecutor() as executor:
+        answer_generic_question_submit(executor, user_input)
+
+        messages = insert_message(messages, "user", user_input)
 
-    tool_result = execute_function(response, tools_functions)
+        response = generate_response("openai", "gpt-4-turbo", messages, tools_descriptions, tool_choice = "required", parallel_tool_calls=False)
 
-    insert_tool_message(messages, response, tool_result)
+        tool_result = execute_function(response, tools_functions)
+
+        insert_tool_message(messages, response, tool_result)
 
 save_messages(messages)
\ No newline at end of file
diff --git a/apps/hal9/tools/generic.py b/apps/hal9/tools/generic.py
index eff76e6f..d55d39af 100644
--- a/apps/hal9/tools/generic.py
+++ b/apps/hal9/tools/generic.py
@@ -1,7 +1,7 @@
 from groq import Groq
 from utils import stream_print, load_messages, insert_message, save_messages
 
-def answer_generic_question(user_input):
+def answer_generic_question(user_input, show = True):
     # load messages
     messages = load_messages(file_path="./.storage/.generic_agent_messages.json")
     messages = insert_message(messages, "user", user_input)
@@ -12,7 +12,7 @@ def answer_generic_question(user_input):
         seed = 1,
         stream = True)
 
-    text_response = stream_print(stream)
+    text_response = stream_print(stream, show)
 
     messages = insert_message(messages, "assistant", text_response)
     save_messages(messages, file_path="./.storage/.generic_agent_messages.json")
@@ -36,4 +36,33 @@ def answer_generic_question(user_input):
             "additionalProperties": False,
         },
     }
+}
+
+answer_generic_future = None
+def answer_generic_wrapper(user_input):
+    result = answer_generic_future.result()
+    print(result)
+
+def answer_generic_question_submit(executor, user_input):
+    global answer_generic_future
+    answer_generic_future = executor.submit(answer_generic_question, user_input, False)
+
+answer_generic_wrapper_description = {
+    "type": "function",
+    "function": {
+        "name": "answer_generic_wrapper",
+        "description": "Handles general questions or queries provided by the user by taking their input and generating a meaningful response.",
+        "strict": True,
+        "parameters": {
+            "type": "object",
+            "properties": {
+                "user_input": {
+                    "type": "string",
+                    "description": "Take the user input and pass the same string to the function",
+                },
+            },
+            "required": ["user_input"],
+            "additionalProperties": False,
+        },
+    }
 }
\ No newline at end of file
diff --git a/apps/hal9/utils.py b/apps/hal9/utils.py
index d2c13716..bde7deed 100644
--- a/apps/hal9/utils.py
+++ b/apps/hal9/utils.py
@@ -90,7 +90,7 @@ def generate_response(
         "n": n
     }
 
-    if tools is not None:
+    if tools is not None and not client_type == "groq":
         payload["parallel_tool_calls"] = parallel_tool_calls
 
     # Generate the response using the client's completion API.
@@ -179,11 +179,12 @@ def execute_function(model_response, functions):
         print(f"Error executing function '{function_name}': {e}")
         raise
 
-def stream_print(stream):
+def stream_print(stream, show = True):
     content = ""
     for chunk in stream:
         if len(chunk.choices) > 0 and chunk.choices[0].delta.content is not None:
-            print(chunk.choices[0].delta.content, end="")
+            if show:
+                print(chunk.choices[0].delta.content, end="")
             content += chunk.choices[0].delta.content
 
     return content
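For context on how the app.py and tools/generic.py changes fit together: the slow generic LLM call is submitted to a ThreadPoolExecutor before the tool-selection request runs, and the answer_generic_wrapper tool only blocks on the stored future if the model ends up picking it. Below is a minimal, self-contained sketch of that pre-submit pattern using only the standard library; the names slow_generic_answer, submit_generic_answer, generic_answer_tool, and pending_future are illustrative stand-ins, not code from this repo.

# Sketch of the pre-submit pattern, under the assumptions stated above.
from concurrent.futures import ThreadPoolExecutor
import time

pending_future = None  # module-level handle, analogous to answer_generic_future

def slow_generic_answer(user_input):
    # Stand-in for the Groq-backed answer_generic_question call;
    # the sleep simulates network latency.
    time.sleep(1)
    return f"answer to: {user_input}"

def submit_generic_answer(executor, user_input):
    # Start the slow call in the background and remember its Future.
    global pending_future
    pending_future = executor.submit(slow_generic_answer, user_input)

def generic_answer_tool(user_input):
    # Invoked later if the model selects this tool; blocks only for whatever
    # time is still left on the background call, then returns its result.
    return pending_future.result()

if __name__ == "__main__":
    with ThreadPoolExecutor() as executor:
        submit_generic_answer(executor, "What is a thread pool?")
        # ... tool selection / other requests would run here in parallel ...
        print(generic_answer_tool("What is a thread pool?"))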