
Commit

fix: pre-commit
samuelrince committed Oct 28, 2024
1 parent: 594d4fb · commit: 16de301
Showing 5 changed files with 9 additions and 10 deletions.
ecologits/impacts/llm.py (2 changes: 1 addition & 1 deletion)
@@ -3,7 +3,7 @@
 from typing import Any, Optional

 from ecologits.impacts.dag import DAG
-from ecologits.impacts.modeling import GWP, PE, ADPe, Embodied, Energy, Impacts, Usage, ValueOrRange
+from ecologits.impacts.modeling import GWP, PE, ADPe, Embodied, Energy, Impacts, Usage
 from ecologits.range_value import RangeValue, ValueOrRange

 MODEL_QUANTIZATION_BITS = 4
ecologits/model_repository.py (4 changes: 2 additions & 2 deletions)
@@ -59,7 +59,7 @@ class Models(BaseModel):

 class ModelRepository:

-    def __init__(self, models: list[Model], aliases: Optional[list[Alias]] = None):
+    def __init__(self, models: list[Model], aliases: Optional[list[Alias]] = None) -> None:
         self.__models: dict[tuple[str, str], Model] = {}
         for m in models:
             key = m.provider.value, m.name
@@ -89,7 +89,7 @@ def from_json(cls, filepath: Optional[str] = None) -> "ModelRepository":
             filepath = os.path.join(
                 os.path.dirname(os.path.realpath(__file__)), "data", "models.json"
             )
-        with open(filepath, "r") as fd:
+        with open(filepath) as fd:
             data = json.load(fd)
             mf = Models.model_validate(data)
             return cls(models=mf.models, aliases=mf.aliases)
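
Aside (not part of the diff): a minimal usage sketch of the loader touched in this hunk, based only on the signature shown above; my_models.json is a made-up path, and no lookup methods are shown here.

# Hedged sketch of ModelRepository.from_json, assuming the packaged data/models.json is present.
from ecologits.model_repository import ModelRepository

repo = ModelRepository.from_json()                    # falls back to data/models.json next to the module
custom = ModelRepository.from_json("my_models.json")  # or load an explicit file (hypothetical path)
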
ecologits/range_value.py (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
 from typing import Any, Union
-from typing_extensions import Self

 from pydantic import BaseModel, model_validator
+from typing_extensions import Self


 class RangeValue(BaseModel):
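
Aside (not part of the diff): the body of RangeValue is not visible in this hunk; below is a hedged sketch of a pydantic model of this shape, where the min/max fields, the ordering check, and the ValueOrRange union are assumptions; only the imports and the class name appear in the diff.

from typing import Union

from pydantic import BaseModel, model_validator
from typing_extensions import Self


class RangeValue(BaseModel):
    # Field names are assumptions; the real fields are not shown in this commit.
    min: float
    max: float

    @model_validator(mode="after")
    def _check_order(self) -> Self:
        if self.min > self.max:
            raise ValueError("min must be lower than or equal to max")
        return self


# ValueOrRange is imported alongside RangeValue in llm.py; a plain union is one plausible shape.
ValueOrRange = Union[float, RangeValue]
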
ecologits/tracers/litellm_tracer.py (8 changes: 4 additions & 4 deletions)
@@ -1,5 +1,5 @@
 import time
-from typing import Any, Callable, Union, Optional
+from typing import Any, Callable, Optional, Union

 from wrapt import wrap_function_wrapper

@@ -13,7 +13,7 @@
     from litellm import AsyncCompletions, Completions
     from litellm.types.utils import ModelResponse
     from litellm.utils import CustomStreamWrapper
-    from rapidfuzz import process, fuzz
+    from rapidfuzz import fuzz, process

 except ImportError:
     ModelResponse = object()
@@ -35,7 +35,7 @@ class ChatCompletionChunk(ModelResponse):
 _model_choices = [f"{m.provider.value}/{m.name}" for m in models.list_models()]


-def litellm_match_model(model_name) -> Optional[tuple[str, str]]:
+def litellm_match_model(model_name: str) -> Optional[tuple[str, str]]:
     """
     Match according provider and model from a litellm model_name.
@@ -45,7 +45,6 @@ def litellm_match_model(model_name) -> Optional[tuple[str, str]]:
     Returns:
         A tuple (provider, model_name) matching a record of the ModelRepository.
     """
-    # print(process.extractOne(query=model_name, choices=_model_choices, scorer=fuzz.token_sort_ratio))
     candidate = process.extractOne(
         query=model_name,
         choices=_model_choices,
@@ -55,6 +54,7 @@
     if candidate is not None:
         provider, model_name = candidate[0].split("/", 1)
         return provider, model_name
+    return None


 def litellm_chat_wrapper(
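
Aside (not part of the diff): a standalone sketch of the rapidfuzz call that litellm_match_model builds on; the choices list below is illustrative, and any score_cutoff used in the real code is not shown in this excerpt.

from rapidfuzz import fuzz, process

# Illustrative provider/model strings; the real list comes from models.list_models().
choices = ["openai/gpt-4o-mini", "mistralai/mistral-large-latest"]

# extractOne returns a (choice, score, index) tuple, or None when a score_cutoff
# is set and no choice reaches it.
candidate = process.extractOne(
    query="gpt-4o-mini",
    choices=choices,
    scorer=fuzz.token_sort_ratio,
)
if candidate is not None:
    provider, model_name = candidate[0].split("/", 1)
    print(provider, model_name)  # openai gpt-4o-mini
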
ecologits/tracers/utils.py (3 changes: 1 addition & 2 deletions)
@@ -4,8 +4,7 @@
 from ecologits.impacts.llm import compute_llm_impacts
 from ecologits.impacts.modeling import Impacts
 from ecologits.log import logger
-from ecologits.impacts.modeling import Impacts
-from ecologits.model_repository import models, ArchitectureTypes
+from ecologits.model_repository import ArchitectureTypes, models


 def _avg(value_range: tuple) -> float:
