Skip to content

Commit

Permalink
Merge pull request #20 from EURAC-EEBgroup/pre/beta
Browse files Browse the repository at this point in the history
Pre/beta
  • Loading branch information
PeriniM authored Jan 2, 2025
2 parents 23c57e2 + 594db70 commit a6540be
Show file tree
Hide file tree
Showing 21 changed files with 377 additions and 253 deletions.
28 changes: 28 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,31 @@
## [1.3.0-beta.1](https://github.com/EURAC-EEBgroup/brick-llm/compare/v1.2.0...v1.3.0-beta.1) (2025-01-02)


### Features

* get_sensor implementation ([e100a88](https://github.com/EURAC-EEBgroup/brick-llm/commit/e100a886acb536077d4ec69bae07f7f84626178d))
* get_sensor_presence edge ([b7a3e59](https://github.com/EURAC-EEBgroup/brick-llm/commit/b7a3e597e107844fd484068111037756d25d1cd4))
* implementing ollama 3.2 through ChatOllama models ([bf3cf6b](https://github.com/EURAC-EEBgroup/brick-llm/commit/bf3cf6bab72add3f449a3932d51e360a76c18da3))


### Bug Fixes

* extract answer content in local generation ([a8f7e39](https://github.com/EURAC-EEBgroup/brick-llm/commit/a8f7e3954c25b1d2584a40c7329a64f9b77f199f))
* poetry run pre-commit ([edf4664](https://github.com/EURAC-EEBgroup/brick-llm/commit/edf4664deda8a8faaa4e0b1d5ce92276f2bea137))
* prompt correction ([cf5babd](https://github.com/EURAC-EEBgroup/brick-llm/commit/cf5babd71b703f4df2764b7e410524923df8253b))
* prompt correction ([3e8d61a](https://github.com/EURAC-EEBgroup/brick-llm/commit/3e8d61a9e008e8e4f24153ab31bf781f910a6cd0))
* prompt engineering ([a7ae3bc](https://github.com/EURAC-EEBgroup/brick-llm/commit/a7ae3bcfbf69fa369ef7a0a21c39596aa9c13791))


### chore

* added missing dependencies and linting ([4e4169c](https://github.com/EURAC-EEBgroup/brick-llm/commit/4e4169c053f48a3bd2bf3252a052c64565558ae2))


### Docs

* added env note ([23c57e2](https://github.com/EURAC-EEBgroup/brick-llm/commit/23c57e249931c71130ae8d1b880bee1950b1501d))

## [1.2.0](https://github.com/EURAC-EEBgroup/brick-llm/compare/v1.1.2...v1.2.0) (2024-11-11)


Expand Down
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ pip install poetry
poetry install

# Install pre-commit hooks
pre-commit install
poetry run pre-commit install
```

</details>
Expand All @@ -52,7 +52,7 @@ pre-commit install

Here's a simple example of how to use BrickLLM:

> [!NOTE]
> [!NOTE]
> You must first create a [.env](.env.example) file with the API keys of the specified LLM provider (if not local) and load them in the environment
``` python
Expand Down
2 changes: 2 additions & 0 deletions brickllm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from .schemas import (
ElemListSchema,
RelationshipsSchema,
SensorSchema,
TTLSchema,
TTLToBuildingPromptSchema,
)
Expand All @@ -17,4 +18,5 @@
"StateLocal",
"GraphConfig",
"custom_logger",
"SensorSchema",
]
3 changes: 2 additions & 1 deletion brickllm/edges/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from .check_sensor_presence import check_sensor_presence
from .validate_condition import validate_condition
from .validate_condition_local import validate_condition_local

__all__ = ["validate_condition", "validate_condition_local"]
__all__ = ["validate_condition", "validate_condition_local", "check_sensor_presence"]
42 changes: 42 additions & 0 deletions brickllm/edges/check_sensor_presence.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
from typing import Any, Dict, Literal

from ..logger import custom_logger
from ..utils import get_hierarchical_info


def check_sensor_presence(
    state: Dict[str, Any]
) -> Literal["get_sensors", "schema_to_ttl"]:
    """
    Decide whether the identified building elements include any sensors.

    Collects the grandchildren of the Brick "Point" class (the concrete
    sensor-like categories) and checks whether any element in the current
    state belongs to that set. Used as a conditional edge in the graph:
    routes to sensor extraction when sensors are present, otherwise
    straight to TTL generation.

    Args:
        state (Dict[str, Any]): The current state; reads "elem_list", the
            list of Brick element class names identified so far (defaults
            to an empty list when absent).

    Returns:
        Literal["get_sensors", "schema_to_ttl"]: The next node to visit.
    """

    custom_logger.eurac("📡 Checking for sensor presence")

    elem_list = state.get("elem_list", [])

    # get_hierarchical_info returns (parents, children); only the children
    # of "Point" are needed here.
    _, point_children = get_hierarchical_info("Point")

    # Flatten the grandchildren of "Point" into a set for O(1) membership
    # tests (the original built a list and scanned it per element).
    sensor_classes = {
        grandchild
        for child in point_children
        for grandchild in get_hierarchical_info(child)[1]
    }

    # any() short-circuits on the first sensor found, unlike the original
    # loop which kept scanning after setting the flag.
    if any(elem in sensor_classes for elem in elem_list):
        return "get_sensors"
    return "schema_to_ttl"
13 changes: 9 additions & 4 deletions brickllm/graphs/brickschema_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from langgraph.graph import END, START, StateGraph

from .. import GraphConfig, State
from ..edges import validate_condition
from ..edges import check_sensor_presence, validate_condition
from ..nodes import (
get_elem_children,
get_elements,
Expand All @@ -30,18 +30,23 @@ def build_graph(self):
self.workflow.add_node("get_elem_children", get_elem_children)
self.workflow.add_node("get_relationships", get_relationships)
self.workflow.add_node("schema_to_ttl", schema_to_ttl)
# self.workflow.add_node("sensor_presence", sensor_presence)
self.workflow.add_node("validate_schema", validate_schema)
self.workflow.add_node("get_sensors", get_sensors)

# Add edges to define the flow logic
self.workflow.add_edge(START, "get_elements")
self.workflow.add_edge("get_elements", "get_elem_children")
self.workflow.add_edge("get_elem_children", "get_relationships")
self.workflow.add_edge("get_relationships", "schema_to_ttl")
self.workflow.add_conditional_edges(
"get_relationships",
check_sensor_presence,
{"get_sensors": "get_sensors", "schema_to_ttl": "schema_to_ttl"},
)
self.workflow.add_edge("get_sensors", "schema_to_ttl")
self.workflow.add_edge("schema_to_ttl", "validate_schema")
self.workflow.add_conditional_edges("validate_schema", validate_condition)
self.workflow.add_edge("get_relationships", "get_sensors")
self.workflow.add_edge("get_sensors", END)
self.workflow.add_edge("validate_schema", END)

def run(
self, input_data: Dict[str, Any], stream: bool = False
Expand Down
2 changes: 2 additions & 0 deletions brickllm/helpers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
get_elem_children_instructions,
get_elem_instructions,
get_relationships_instructions,
get_sensors_instructions,
prompt_template_local,
schema_to_ttl_instructions,
ttl_example,
Expand All @@ -18,4 +19,5 @@
"ttl_example",
"prompt_template_local",
"ttl_to_user_prompt",
"get_sensors_instructions",
]
10 changes: 9 additions & 1 deletion brickllm/helpers/llm_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from langchain_anthropic import ChatAnthropic
from langchain_community.llms import Ollama
from langchain_fireworks import ChatFireworks
from langchain_ollama import ChatOllama
from langchain_openai import ChatOpenAI


Expand All @@ -23,6 +24,8 @@ def _get_model(model: Union[str, BaseChatModel]) -> BaseChatModel:

if model == "openai":
return ChatOpenAI(temperature=0, model="gpt-4o")
elif model == "ollama3.2":
return ChatOllama(model="llama3.2")
elif model == "anthropic":
return ChatAnthropic(temperature=0, model_name="claude-3-sonnet-20240229")
elif model == "fireworks":
Expand All @@ -31,5 +34,10 @@ def _get_model(model: Union[str, BaseChatModel]) -> BaseChatModel:
)
elif model == "llama3.1:8b-brick":
return Ollama(model="llama3.1:8b-brick-v8")
elif model == "llama32-3B-brick":
return Ollama(model="hf.co/Giudice7/llama32-3B-brick-demo:latest")

else:
raise ValueError(f"Unsupported model type: {model}")
raise ValueError(
f"Unsupported model type: {model}. Load your own BaseChatModel if this one is not supported."
)
Loading

0 comments on commit a6540be

Please sign in to comment.