diff --git a/README.md b/README.md
index 3b6584b..1ac0641 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
 
 BrickLLM is a Python library for generating RDF files following the BrickSchema ontology using Large Language Models (LLMs).
 
-## 🚀 Features
+## 🧰 Features
 
 - Generate BrickSchema-compliant RDF files from natural language descriptions of buildings and facilities
 - Support for multiple LLM providers (OpenAI, Anthropic, Fireworks)
diff --git a/brickllm/__init__.py b/brickllm/__init__.py
index cdbd062..1a4e70a 100644
--- a/brickllm/__init__.py
+++ b/brickllm/__init__.py
@@ -1,11 +1,17 @@
 from .configs import GraphConfig
-from .schemas import ElemListSchema, RelationshipsSchema, TTLSchema
+from .schemas import (
+    ElemListSchema,
+    RelationshipsSchema,
+    TTLSchema,
+    TTLToBuildingPromptSchema,
+)
 from .states import State, StateLocal
 
 __all__ = [
     "ElemListSchema",
     "RelationshipsSchema",
     "TTLSchema",
+    "TTLToBuildingPromptSchema",
     "State",
     "StateLocal",
     "GraphConfig",
diff --git a/brickllm/graphs/abstract_graph.py b/brickllm/graphs/abstract_graph.py
index c9ef276..9268dec 100644
--- a/brickllm/graphs/abstract_graph.py
+++ b/brickllm/graphs/abstract_graph.py
@@ -1,12 +1,13 @@
 import os
 from abc import ABC, abstractmethod
-from typing import Any, Dict, List, Union
+from typing import Any, Dict, List, Tuple, Union
 
 from langchain.chat_models.base import BaseChatModel
 from langgraph.graph import StateGraph
 from PIL import Image
 
 from ..helpers.llm_models import _get_model
+from ..utils import ttl_to_building_prompt
 
 
 class AbstractBrickSchemaGraph(ABC):
@@ -17,6 +18,8 @@ def __init__(self, model: Union[str, BaseChatModel] = "openai"):
         self.config = {"configurable": {"thread_id": "1", "llm_model": self.model}}
         self.result = None
         self.ttl_output = None
+        self.generated_building_description = None
+        self.generated_key_elements = None
 
     @abstractmethod
     def build_graph(self):
@@ -81,3 +84,11 @@ def save_ttl_output(self, output_file: str = "brick_schema_output.ttl") -> None:
         with open(output_file, "w") as f:
             f.write(self.ttl_output)
+
+    def ttl_to_building_description(self) -> Tuple[str, List[str]]:
+        if self.ttl_output is None:
+            raise ValueError("No TTL output found. Please run the graph first.")
+        self.generated_building_description, self.generated_key_elements = (
+            ttl_to_building_prompt(self.ttl_output, self.model)
+        )
+        return self.generated_building_description, self.generated_key_elements
diff --git a/brickllm/helpers/__init__.py b/brickllm/helpers/__init__.py
index 5ab8e7d..12ec76c 100644
--- a/brickllm/helpers/__init__.py
+++ b/brickllm/helpers/__init__.py
@@ -6,6 +6,7 @@
     prompt_template_local,
     schema_to_ttl_instructions,
     ttl_example,
+    ttl_to_user_prompt,
 )
 
 __all__ = [
@@ -16,4 +17,5 @@
     "schema_to_ttl_instructions",
     "ttl_example",
     "prompt_template_local",
+    "ttl_to_user_prompt",
 ]
diff --git a/brickllm/helpers/prompts.py b/brickllm/helpers/prompts.py
index 11a0bf5..98125a3 100644
--- a/brickllm/helpers/prompts.py
+++ b/brickllm/helpers/prompts.py
@@ -9,7 +9,7 @@
     You are now asked to identify the elements presents in the user prompt, even if not explicitly mentioned.\n
     USER PROMPT: {prompt} \n
     ELEMENTS: {elements_dict} \n
-    """
+    """  # noqa
 
 get_elem_children_instructions: str = """
     You are a BrickSchema ontology expert and you are provided with a user prompt which describes a building or facility.\n
@@ -22,7 +22,7 @@
     USER PROMPT: {prompt} \n
     ELEMENTS HIERARCHY: {elements_list} \n
-    """
+    """  # noqa
 
 get_relationships_instructions: str = """
     You are a BrickSchema ontology expert and are provided with a detailed description of a building or facility.\n
@@ -35,7 +35,7 @@
     If an element has no relationships, add an empty string in place of the missing component ("Room.1","").\n
     Hierarchical structure: {building_structure}\n
     USER PROMPT: {prompt}
-"""
+"""  # noqa
 
 ttl_example: str = """
 @prefix bldg: .
@@ -84,7 +84,7 @@
 bldg:livingroom a brick:Room ;
     brick:isPartOf bldg:Milano_Residence_1 .
 
-"""
+"""  # noqa
 
 schema_to_ttl_instructions: str = """
     You are a BrickSchema ontology expert and you are provided with a user prompt which describes a building or facility.\n
@@ -102,7 +102,23 @@
     USER DESCRIPTION: {prompt}\n
     COMPONENTS DICT: {sensors_dict}\n
-"""
+"""  # noqa
+
+ttl_to_user_prompt: str = """
+    You are a BrickSchema ontology expert tasked with generating a clear and concise description of a building or facility from a TTL script.
+
+    Your output must follow these guidelines:
+    - Focus on the key building characteristics, components and relationships present in the TTL
+    - Maintain technical accuracy and use proper Brick terminology
+    - Keep descriptions clear and well-structured
+    - Only include information explicitly stated in the TTL script
+    - If no TTL content is provided, return an empty string
+
+    TTL script to analyze:
+
+    {ttl_script}
+
+"""  # noqa
 
 prompt_template_local: str = """
 Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.
@@ -113,4 +129,4 @@
 ### Instruction:
 {instructions}
 
 ### Input:
 {user_prompt}
 
 ### Response:
-"""
+"""  # noqa
diff --git a/brickllm/schemas.py b/brickllm/schemas.py
index dd61887..5ba06b4 100644
--- a/brickllm/schemas.py
+++ b/brickllm/schemas.py
@@ -16,3 +16,12 @@ class TTLSchema(BaseModel):
     ttl_output: str = Field(
         ..., description="The generated BrickSchema turtle/rdf script."
     )
+
+
+class TTLToBuildingPromptSchema(BaseModel):
+    building_description: str = Field(
+        ..., description="The generated building description."
+    )
+    key_elements: List[str] = Field(
+        ..., description="The generated list of key elements."
+    )
diff --git a/brickllm/utils/__init__.py b/brickllm/utils/__init__.py
index 5524b11..a7bf289 100644
--- a/brickllm/utils/__init__.py
+++ b/brickllm/utils/__init__.py
@@ -22,6 +22,7 @@
     validate_ttl,
 )
 from .rdf_parser import extract_rdf_graph
+from .ttl_to_prompt import ttl_to_building_prompt
 
 __all__ = [
     "find_parents",
@@ -44,4 +45,5 @@
     "general_query",
     "validate_ttl",
     "extract_rdf_graph",
+    "ttl_to_building_prompt",
 ]
diff --git a/brickllm/utils/ttl_to_prompt.py b/brickllm/utils/ttl_to_prompt.py
new file mode 100644
index 0000000..115e280
--- /dev/null
+++ b/brickllm/utils/ttl_to_prompt.py
@@ -0,0 +1,27 @@
+from typing import List, Tuple, Union
+
+from langchain.chat_models.base import BaseChatModel
+from langchain_community.llms import Ollama
+from langchain_core.messages import HumanMessage, SystemMessage
+
+from ..helpers import ttl_to_user_prompt
+from ..schemas import TTLToBuildingPromptSchema
+
+
+def ttl_to_building_prompt(
+    ttl_file: str, llm: Union[Ollama, BaseChatModel]
+) -> Tuple[str, List[str]]:
+
+    # Enforce structured output
+    structured_llm = llm.with_structured_output(TTLToBuildingPromptSchema)
+
+    # System message
+    system_message = ttl_to_user_prompt.format(ttl_script=ttl_file)
+
+    # Generate question
+    answer = structured_llm.invoke(
+        [SystemMessage(content=system_message)]
+        + [HumanMessage(content="Generate the TTL.")]
+    )
+
+    return answer.building_description, answer.key_elements
diff --git a/examples/example_custom_llm.py b/examples/example_custom_llm.py
index b548c58..6fa0231 100644
--- a/examples/example_custom_llm.py
+++ b/examples/example_custom_llm.py
@@ -34,3 +34,12 @@
 
 # save the result to a file
 brick_graph.save_ttl_output("my_building_custom.ttl")
+
+# Generate the building description from the generated ttl file
+building_description, key_elements = brick_graph.ttl_to_building_description()
+
+print("Generated building description:")
+print(building_description)
+print("--------------------------------")
+print("Generated key elements:")
+print(key_elements)
diff --git a/examples/example_openai.py b/examples/example_openai.py
index 9d8107d..d70c0b9 100644
--- a/examples/example_openai.py
+++ b/examples/example_openai.py
@@ -32,3 +32,12 @@
 
 # save the result to a file
 brick_graph.save_ttl_output("my_building.ttl")
+
+# Generate the building description from the generated ttl file
+building_description, key_elements = brick_graph.ttl_to_building_description()
+
+print("Generated building description:")
+print(building_description)
+print("--------------------------------")
+print("Generated key elements:")
+print(key_elements)
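Beyond the graph-level `ttl_to_building_description()` call shown in the example scripts above, the new `ttl_to_building_prompt` utility can also be invoked directly on an existing TTL file. A minimal sketch, assuming `langchain_openai` is installed, `OPENAI_API_KEY` is set, and a local `my_building.ttl` file exists (the model name `gpt-4o` is illustrative, not prescribed by the library):

```python
# Sketch: describe an existing BrickSchema TTL file with the new utility.
from langchain_openai import ChatOpenAI

from brickllm.utils import ttl_to_building_prompt

# Load the TTL content; the utility expects the script itself, not a path.
with open("my_building.ttl") as f:
    ttl_content = f.read()

# Any chat model supporting structured output should work here.
llm = ChatOpenAI(model="gpt-4o", temperature=0)

# Returns a natural-language description and a list of key Brick elements.
description, key_elements = ttl_to_building_prompt(ttl_content, llm)
print(description)
print(key_elements)
```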