From e9d1bc50d9126d376014ba77706bb732a8d1b329 Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Wed, 21 Aug 2024 14:06:45 +0000 Subject: [PATCH 01/40] Added support for AzureAI client service --- .../components/model_client/azureai_client.py | 399 ++++++++++++++++++ 1 file changed, 399 insertions(+) create mode 100644 adalflow/adalflow/components/model_client/azureai_client.py diff --git a/adalflow/adalflow/components/model_client/azureai_client.py b/adalflow/adalflow/components/model_client/azureai_client.py new file mode 100644 index 00000000..226720b1 --- /dev/null +++ b/adalflow/adalflow/components/model_client/azureai_client.py @@ -0,0 +1,399 @@ +"""AzureOpenAI ModelClient integration.""" + +import os +from typing import ( + Dict, + Sequence, + Optional, + List, + Any, + TypeVar, + Callable, + Generator, + Union, + Literal, +) +import re + +import logging +import backoff + +# optional import +from adalflow.utils.lazy_import import safe_import, OptionalPackages + + +openai = safe_import(OptionalPackages.OPENAI.value[0], OptionalPackages.OPENAI.value[1]) +from azure.identity import DefaultAzureCredential, get_bearer_token_provider +from azure.core.credentials import AccessToken +from openai import AzureOpenAI, AsyncAzureOpenAI, Stream +from openai import ( + APITimeoutError, + InternalServerError, + RateLimitError, + UnprocessableEntityError, + BadRequestError, +) +from openai.types import ( + Completion, + CreateEmbeddingResponse, +) +from openai.types.chat import ChatCompletionChunk, ChatCompletion + +from adalflow.core.model_client import ModelClient +from adalflow.core.types import ( + ModelType, + EmbedderOutput, + TokenLogProb, + CompletionUsage, + GeneratorOutput, +) +from adalflow.components.model_client.utils import parse_embedding_response + +log = logging.getLogger(__name__) +T = TypeVar("T") + + +# completion parsing functions and you can combine them into one singple chat completion parser +def get_first_message_content(completion: ChatCompletion) -> str: + r"""When we only need the content of the first message. 
+ It is the default parser for chat completion.""" + return completion.choices[0].message.content + + +# def _get_chat_completion_usage(completion: ChatCompletion) -> OpenAICompletionUsage: +# return completion.usage + + +def parse_stream_response(completion: ChatCompletionChunk) -> str: + r"""Parse the response of the stream API.""" + return completion.choices[0].delta.content + + +def handle_streaming_response(generator: Stream[ChatCompletionChunk]): + r"""Handle the streaming response.""" + for completion in generator: + log.debug(f"Raw chunk completion: {completion}") + parsed_content = parse_stream_response(completion) + yield parsed_content + + +def get_all_messages_content(completion: ChatCompletion) -> List[str]: + r"""When the n > 1, get all the messages content.""" + return [c.message.content for c in completion.choices] + + +def get_probabilities(completion: ChatCompletion) -> List[List[TokenLogProb]]: + r"""Get the probabilities of each token in the completion.""" + log_probs = [] + for c in completion.choices: + content = c.logprobs.content + print(content) + log_probs_for_choice = [] + for openai_token_logprob in content: + token = openai_token_logprob.token + logprob = openai_token_logprob.logprob + log_probs_for_choice.append(TokenLogProb(token=token, logprob=logprob)) + log_probs.append(log_probs_for_choice) + return log_probs + + +class AzureAIClient(ModelClient): + __doc__ = r"""A component wrapper for the AzureOpenAI API client. + + Support both embedding and chat completion API. + + Users (1) simplify use ``Embedder`` and ``Generator`` components by passing OpenAIClient() as the model_client. + (2) can use this as an example to create their own API client or extend this class(copying and modifing the code) in their own project. + + Note: + We suggest users not to use `response_format` to enforce output data type or `tools` and `tool_choice` in your model_kwargs when calling the API. + We do not know how AzureOpenAI is doing the formating or what prompt they have added. + Instead + - use :ref:`OutputParser` for response parsing and formating. + + Args: + api_key (Optional[str], optional): AzureOpenAI API key. Defaults to None. + chat_completion_parser (Callable[[Completion], Any], optional): A function to parse the chat completion to a str. Defaults to None. + Default is `get_first_message_content`. + + References: + - Embeddings models: https://platform.openai.com/docs/guides/embeddings + - Chat models: https://platform.openai.com/docs/guides/text-generation + - AzureOpenAI docs: https://learn.microsoft.com/en-us/azure/ai-services/openai/overview + """ + + def __init__( + self, + api_key: Optional[str] = None, + api_version:Optional[str]=None, + azure_endpoint: Optional[str]= None, + credential: Optional[DefaultAzureCredential] = None, + chat_completion_parser: Callable[[Completion], Any] = None, + input_type: Literal["text", "messages"] = "text", + ): + r"""It is recommended to set the OPENAI_API_KEY environment variable instead of passing it as an argument. + + + Initializes the Azure OpenAI client with either API key or AAD token authentication. + + Args: + api_key: Azure OpenAI API key. + api_version: Azure OpenAI API version. + azure_endpoint: Azure OpenAI endpoint. + credential: Azure AD credential for token-based authentication. + chat_completion_parser: Function to parse chat completions. + input_type: Input format, either "text" or "messages". 
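A hedged sketch of wiring this client into adalflow's ``Generator`` (mirroring the commented-out example at the bottom of this file; the deployment name is a placeholder and the ``AZURE_OPENAI_*`` environment variables are assumed to be set):

```python
from adalflow.core import Generator
from adalflow.components.model_client.azureai_client import AzureAIClient

generator = Generator(
    # with no arguments, the client falls back to AZURE_OPENAI_API_KEY / _ENDPOINT / _VERSION
    model_client=AzureAIClient(),
    model_kwargs={"model": "gpt-35-turbo"},  # assumed to be your Azure deployment name
)

prompt_kwargs = {"input_str": "What is the meaning of life?"}
response = generator(prompt_kwargs)
print(response)
```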
+ + """ + super().__init__() + + # added api_type azure for azure Ai + self.api_type = "azure" + self._api_key = api_key + self._apiversion= api_version + self._azure_endpoint = azure_endpoint + self._credential = credential + self.sync_client = self.init_sync_client() + self.async_client = None # only initialize if the async call is called + self.chat_completion_parser = ( + chat_completion_parser or get_first_message_content + ) + self._input_type = input_type + + def init_sync_client(self): + api_key = self._api_key or os.getenv("AZURE_OPENAI_API_KEY") + azure_endpoint = self._azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT") + api_version = self._apiversion or os.getenv("AZURE_OPENAI_VERSION") + credential = self._credential or DefaultAzureCredential + if not azure_endpoint: + raise ValueError("Environment variable AZURE_OPENAI_ENDPOINT must be set") + if not api_version: + raise ValueError("Environment variable AZURE_OPENAI_VERSION must be set") + + if api_key: + return AzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version) + elif self._credential: + # credential = DefaultAzureCredential() + token_provider = get_bearer_token_provider(credential) + return AzureOpenAI(token=token_provider(), azure_endpoint=azure_endpoint, api_version=api_version) + else: + raise ValueError("Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided") + + + def init_async_client(self): + api_key = self._api_key or os.getenv("AZURE_OPENAI_API_KEY") + azure_endpoint = self._azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT") + api_version = self._apiversion or os.getenv("AZURE_OPENAI_VERSION") + credential = self._credential or DefaultAzureCredential() + if not azure_endpoint: + raise ValueError("Environment variable AZURE_OPENAI_ENDPOINT must be set") + if not api_version: + raise ValueError("Environment variable AZURE_OPENAI_VERSION must be set") + + if api_key: + return AsyncAzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version) + elif self._credential: + # credential = DefaultAzureCredential() + token_provider = get_bearer_token_provider(credential) + return AsyncAzureOpenAI(token=token_provider(), azure_endpoint=azure_endpoint, api_version=api_version) + else: + raise ValueError("Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided") + + # def _parse_chat_completion(self, completion: ChatCompletion) -> "GeneratorOutput": + # # TODO: raw output it is better to save the whole completion as a source of truth instead of just the message + # try: + # data = self.chat_completion_parser(completion) + # usage = self.track_completion_usage(completion) + # return GeneratorOutput( + # data=data, error=None, raw_response=str(data), usage=usage + # ) + # except Exception as e: + # log.error(f"Error parsing the completion: {e}") + # return GeneratorOutput(data=None, error=str(e), raw_response=completion) + + def parse_chat_completion( + self, + completion: Union[ChatCompletion, Generator[ChatCompletionChunk, None, None]], + ) -> "GeneratorOutput": + """Parse the completion, and put it into the raw_response.""" + log.debug(f"completion: {completion}, parser: {self.chat_completion_parser}") + try: + data = self.chat_completion_parser(completion) + usage = self.track_completion_usage(completion) + return GeneratorOutput( + data=None, error=None, raw_response=data, usage=usage + ) + except Exception as e: + log.error(f"Error parsing the completion: {e}") + return GeneratorOutput(data=None, 
error=str(e), raw_response=completion) + + def track_completion_usage( + self, + completion: Union[ChatCompletion, Generator[ChatCompletionChunk, None, None]], + ) -> CompletionUsage: + if isinstance(completion, ChatCompletion): + usage: CompletionUsage = CompletionUsage( + completion_tokens=completion.usage.completion_tokens, + prompt_tokens=completion.usage.prompt_tokens, + total_tokens=completion.usage.total_tokens, + ) + return usage + else: + raise NotImplementedError( + "streaming completion usage tracking is not implemented" + ) + + def parse_embedding_response( + self, response: CreateEmbeddingResponse + ) -> EmbedderOutput: + r"""Parse the embedding response to a structure LightRAG components can understand. + + Should be called in ``Embedder``. + """ + try: + return parse_embedding_response(response) + except Exception as e: + log.error(f"Error parsing the embedding response: {e}") + return EmbedderOutput(data=[], error=str(e), raw_response=response) + + def convert_inputs_to_api_kwargs( + self, + input: Optional[Any] = None, + model_kwargs: Dict = {}, + model_type: ModelType = ModelType.UNDEFINED, + ) -> Dict: + r""" + Specify the API input type and output api_kwargs that will be used in _call and _acall methods. + Convert the Component's standard input, and system_input(chat model) and model_kwargs into API-specific format + """ + + final_model_kwargs = model_kwargs.copy() + if model_type == ModelType.EMBEDDER: + if isinstance(input, str): + input = [input] + # convert input to input + if not isinstance(input, Sequence): + raise TypeError("input must be a sequence of text") + final_model_kwargs["input"] = input + elif model_type == ModelType.LLM: + # convert input to messages + messages: List[Dict[str, str]] = [] + + if self._input_type == "messages": + system_start_tag = "" + system_end_tag = "" + user_start_tag = "" + user_end_tag = "" + pattern = f"{system_start_tag}(.*?){system_end_tag}{user_start_tag}(.*?){user_end_tag}" + # Compile the regular expression + regex = re.compile(pattern) + # Match the pattern + match = regex.search(input) + system_prompt, input_str = None, None + + if match: + system_prompt = match.group(1) + input_str = match.group(2) + + else: + print("No match found.") + if system_prompt and input_str: + messages.append({"role": "system", "content": system_prompt}) + messages.append({"role": "user", "content": input_str}) + if len(messages) == 0: + messages.append({"role": "system", "content": input}) + final_model_kwargs["messages"] = messages + else: + raise ValueError(f"model_type {model_type} is not supported") + return final_model_kwargs + + @backoff.on_exception( + backoff.expo, + ( + APITimeoutError, + InternalServerError, + RateLimitError, + UnprocessableEntityError, + BadRequestError, + ), + max_time=5, + ) + def call(self, api_kwargs: Dict = {}, model_type: ModelType = ModelType.UNDEFINED): + """ + kwargs is the combined input and model_kwargs. Support streaming call. 
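A hedged usage sketch of the streaming branch, given an already initialized client (the deployment name below is a placeholder):

```python
api_kwargs = {
    "model": "your-chat-deployment",  # placeholder Azure deployment name
    "messages": [{"role": "user", "content": "Hello"}],
    "stream": True,
}
stream = client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM)
for text in handle_streaming_response(stream):  # helper defined at the top of this module
    if text:  # some chunks carry no delta content
        print(text)
```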
+ """ + log.info(f"api_kwargs: {api_kwargs}") + if model_type == ModelType.EMBEDDER: + return self.sync_client.embeddings.create(**api_kwargs) + elif model_type == ModelType.LLM: + if "stream" in api_kwargs and api_kwargs.get("stream", False): + log.debug("streaming call") + self.chat_completion_parser = handle_streaming_response + return self.sync_client.chat.completions.create(**api_kwargs) + return self.sync_client.chat.completions.create(**api_kwargs) + else: + raise ValueError(f"model_type {model_type} is not supported") + + @backoff.on_exception( + backoff.expo, + ( + APITimeoutError, + InternalServerError, + RateLimitError, + UnprocessableEntityError, + BadRequestError, + ), + max_time=5, + ) + async def acall( + self, api_kwargs: Dict = {}, model_type: ModelType = ModelType.UNDEFINED + ): + """ + kwargs is the combined input and model_kwargs + """ + if self.async_client is None: + self.async_client = self.init_async_client() + if model_type == ModelType.EMBEDDER: + return await self.async_client.embeddings.create(**api_kwargs) + elif model_type == ModelType.LLM: + return await self.async_client.chat.completions.create(**api_kwargs) + else: + raise ValueError(f"model_type {model_type} is not supported") + + @classmethod + def from_dict(cls: type[T], data: Dict[str, Any]) -> T: + obj = super().from_dict(data) + # recreate the existing clients + obj.sync_client = obj.init_sync_client() + obj.async_client = obj.init_async_client() + return obj + + def to_dict(self) -> Dict[str, Any]: + r"""Convert the component to a dictionary.""" + # TODO: not exclude but save yes or no for recreating the clients + exclude = [ + "sync_client", + "async_client", + ] # unserializable object + output = super().to_dict(exclude=exclude) + return output + + +# if __name__ == "__main__": +# from adalflow.core import Generator +# from adalflow.utils import setup_env, get_logger + +# log = get_logger(level="DEBUG") + +# setup_env() +# prompt_kwargs = {"input_str": "What is the meaning of life?"} + +# gen = Generator( +# model_client=OpenAIClient(), +# model_kwargs={"model": "gpt-3.5-turbo", "stream": True}, +# ) +# gen_response = gen(prompt_kwargs) +# print(f"gen_response: {gen_response}") + +# for genout in gen_response.data: +# print(f"genout: {genout}") From 3da6636f2217bc62dc42543c7d7d0e89cbfa93a1 Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Tue, 27 Aug 2024 21:49:20 -0400 Subject: [PATCH 02/40] Update azureai_client.py fixed azure_ad_token provider in AzureOpenAI --- .../adalflow/components/model_client/azureai_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/adalflow/adalflow/components/model_client/azureai_client.py b/adalflow/adalflow/components/model_client/azureai_client.py index 226720b1..be94d07f 100644 --- a/adalflow/adalflow/components/model_client/azureai_client.py +++ b/adalflow/adalflow/components/model_client/azureai_client.py @@ -174,8 +174,8 @@ def init_sync_client(self): return AzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version) elif self._credential: # credential = DefaultAzureCredential() - token_provider = get_bearer_token_provider(credential) - return AzureOpenAI(token=token_provider(), azure_endpoint=azure_endpoint, api_version=api_version) + token_provider = get_bearer_token_provider(DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default") + return AzureOpenAI(azure_ad_token_provider=token_provider, azure_endpoint=azure_endpoint, 
api_version=api_version) else: raise ValueError("Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided") @@ -194,8 +194,8 @@ def init_async_client(self): return AsyncAzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version) elif self._credential: # credential = DefaultAzureCredential() - token_provider = get_bearer_token_provider(credential) - return AsyncAzureOpenAI(token=token_provider(), azure_endpoint=azure_endpoint, api_version=api_version) + token_provider = get_bearer_token_provider(DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default") + return AsyncAzureOpenAI(azure_ad_token_provider=token_provider, azure_endpoint=azure_endpoint, api_version=api_version) else: raise ValueError("Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided") From 101d1c3b328167426c35d72651ed9da07971a0b1 Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Sun, 8 Sep 2024 05:29:06 +0000 Subject: [PATCH 03/40] added azure --- pyproject.toml | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index be553aad..56ab3998 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,3 @@ - [tool.poetry] name = "adalflow-project" version = "0.1.0" @@ -17,14 +16,9 @@ packages = [ [tool.poetry.dependencies] python = ">=3.11, <4.0" adalflow = { path = "adalflow", develop = true } -# torch = "^2.3.1" openai = "^1.34.0" -# lightrag = {path = "lightrag/dist/lightrag-0.0.0a11-py3-none-any.whl"} -# lightrag = "^0.0.0a13" - [tool.poetry.group.dev.dependencies] - pre-commit = "^3.7.0" anthropic = "^0.26.0" torchmetrics = "^1.4.0.post0" @@ -43,7 +37,12 @@ textgrad = "^0.1.4" tensorboardx = "^2.6.2.2" tensorboard = "^2.17.0" dspy-ai = "^2.4.13" +transformers = "^4.44.0" +accelerate = "^0.33.0" +[tool.poetry.group.azure.dependencies] +azure-storage-blob = "^12.14.0" +azure-identity = "^1.12.0" # [tool.poetry.group.doc.dependencies] # pydata-sphinx-theme = "^0.15.3" @@ -58,9 +57,6 @@ dspy-ai = "^2.4.13" # google-generativeai = "^0.7.1" # faiss-cpu = "^1.8.0.post1" # lightrag = { path = "lightrag", develop = true } -transformers = "^4.44.0" -accelerate = "^0.33.0" - [build-system] requires = ["poetry-core>=1.0.0"] From f249f101283996f487dd25f5c2bf693a798669fc Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Sun, 8 Sep 2024 06:04:19 +0000 Subject: [PATCH 04/40] added test for Azure client --- adalflow/tests/test_AzureClient.py | 119 +++++++++++++++++++++++++++++ 1 file changed, 119 insertions(+) create mode 100644 adalflow/tests/test_AzureClient.py diff --git a/adalflow/tests/test_AzureClient.py b/adalflow/tests/test_AzureClient.py new file mode 100644 index 00000000..8bfc1ce7 --- /dev/null +++ b/adalflow/tests/test_AzureClient.py @@ -0,0 +1,119 @@ +import unittest +from unittest.mock import patch, MagicMock +from azure.identity import DefaultAzureCredential +from openai import AzureOpenAI +from openai.types import Completion, CreateEmbeddingResponse +from openai.types.chat import ChatCompletionChunk, ChatCompletion +from adalflow.core.model_client import ModelClient +from adalflow.core.types import ModelType, EmbedderOutput, TokenLogProb, CompletionUsage, GeneratorOutput +from adalflow.components.model_client.openai_client import AzureAIClient + +class TestAzureAIClient(unittest.TestCase): + + @patch('adalflow.components.model_client.openai_client.AzureOpenAI') + 
@patch('adalflow.components.model_client.openai_client.DefaultAzureCredential') + def setUp(self, MockDefaultAzureCredential, MockAzureOpenAI): + self.mock_credential = MockDefaultAzureCredential() + self.mock_sync_client = MockAzureOpenAI.return_value + self.client = AzureAIClient( + api_key="test_api_key", + api_version="v1", + azure_endpoint="https://test.endpoint", + credential=self.mock_credential, + ) + self.client.sync_client = self.mock_sync_client + + def test_init_sync_client_with_api_key(self): + client = AzureAIClient(api_key="test_key", api_version="v1", azure_endpoint="https://test.endpoint") + self.assertIsInstance(client.sync_client, AzureOpenAI) + + def test_init_sync_client_with_credential(self): + client = AzureAIClient( + api_version="v1", + azure_endpoint="https://test.endpoint", + credential=self.mock_credential + ) + self.assertIsInstance(client.sync_client, AzureOpenAI) + + @patch('adalflow.components.model_client.openai_client.AzureOpenAI') + def test_call_embeddings(self, MockAzureOpenAI): + mock_embeddings = MagicMock() + MockAzureOpenAI.return_value.embeddings.create = mock_embeddings + api_kwargs = {'input': ["test"]} + model_type = ModelType.EMBEDDER + self.client.call(api_kwargs=api_kwargs, model_type=model_type) + MockAzureOpenAI.return_value.embeddings.create.assert_called_once_with(**api_kwargs) + + @patch('adalflow.components.model_client.openai_client.AzureOpenAI') + def test_call_chat_completions(self, MockAzureOpenAI): + mock_chat_completions = MagicMock() + MockAzureOpenAI.return_value.chat.completions.create = mock_chat_completions + api_kwargs = {'input': "test"} + model_type = ModelType.LLM + self.client.call(api_kwargs=api_kwargs, model_type=model_type) + MockAzureOpenAI.return_value.chat.completions.create.assert_called_once_with(**api_kwargs) + + @patch('adalflow.components.model_client.openai_client.AzureOpenAI') + def test_parse_chat_completion(self, MockAzureOpenAI): + mock_chat_completion = MagicMock(spec=ChatCompletion) + mock_chat_completion.choices = [MagicMock(message=MagicMock(content="test_content"))] + self.client.chat_completion_parser = lambda completion: completion.choices[0].message.content + result = self.client.parse_chat_completion(mock_chat_completion) + self.assertEqual(result, "test_content") + + @patch('adalflow.components.model_client.openai_client.AzureOpenAI') + def test_track_completion_usage(self, MockAzureOpenAI): + mock_chat_completion = MagicMock(spec=ChatCompletion) + mock_chat_completion.usage = MagicMock( + completion_tokens=10, + prompt_tokens=5, + total_tokens=15 + ) + result = self.client.track_completion_usage(mock_chat_completion) + self.assertEqual(result.completion_tokens, 10) + self.assertEqual(result.prompt_tokens, 5) + self.assertEqual(result.total_tokens, 15) + + @patch('adalflow.components.model_client.openai_client.AzureOpenAI') + def test_parse_embedding_response(self, MockAzureOpenAI): + mock_embedding_response = MagicMock(spec=CreateEmbeddingResponse) + self.client.parse_embedding_response = lambda response: EmbedderOutput(data=["test_embedding"], error=None, raw_response=response) + result = self.client.parse_embedding_response(mock_embedding_response) + self.assertEqual(result.data, ["test_embedding"]) + + @patch('adalflow.components.model_client.openai_client.AzureOpenAI') + def test_convert_inputs_to_api_kwargs(self, MockAzureOpenAI): + input_data = "test input" + model_kwargs = {"param": "value"} + result = self.client.convert_inputs_to_api_kwargs(input=input_data, model_kwargs=model_kwargs, 
model_type=ModelType.LLM) + expected = { + "input": input_data, + "param": "value" + } + self.assertEqual(result, expected) + + def test_from_dict(self): + data = { + "api_key": "test_key", + "api_version": "v1", + "azure_endpoint": "https://test.endpoint", + "credential": self.mock_credential, + } + client = AzureAIClient.from_dict(data) + self.assertEqual(client._api_key, "test_key") + self.assertEqual(client._apiversion, "v1") + self.assertEqual(client._azure_endpoint, "https://test.endpoint") + + def test_to_dict(self): + expected = { + "api_key": "test_api_key", + "api_version": "v1", + "azure_endpoint": "https://test.endpoint", + "credential": self.mock_credential, + } + result = self.client.to_dict() + for key, value in expected.items(): + self.assertEqual(result.get(key), value) + +if __name__ == '__main__': + unittest.main() From 0847c7deaedc9411d17214b4532728558ddc6095 Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Sun, 8 Sep 2024 06:04:48 +0000 Subject: [PATCH 05/40] added azure extrac packages --- adalflow/pyproject.toml | 35 ++++++++++++++--------------------- 1 file changed, 14 insertions(+), 21 deletions(-) diff --git a/adalflow/pyproject.toml b/adalflow/pyproject.toml index 3cf1148e..913dd0bc 100644 --- a/adalflow/pyproject.toml +++ b/adalflow/pyproject.toml @@ -1,12 +1,10 @@ [tool.poetry] name = "adalflow" - version = "0.2.0" description = "The Library to Build and Auto-optimize Any LLM Task Pipeline" authors = ["Li Yin "] readme = "README.md" repository = "https://github.com/SylphAI-Inc/LightRAG" - license = "MIT" maintainers = ["Li Yin "] classifiers = [ @@ -27,10 +25,8 @@ include = [ "adalflow/optim/*", ] - packages = [{ include = "adalflow", from = "." }] - [tool.poetry.dependencies] python = ">=3.9, <4.0" @@ -43,8 +39,7 @@ numpy = "^1.26.4" tqdm = "^4.66.4" pyyaml = "^6.0.1" nest-asyncio = "^1.6.0" -botocore = "^1.34.149" # if not specified, it will fail to resolve the dependency - +botocore = "^1.34.149" # Optional dependencies openai = { version = "^1.12.0", optional = true } @@ -59,6 +54,11 @@ cohere = { version = "^5.5.8", optional = true } ollama = { version = "^0.2.1", optional = true } diskcache = "^5.6.3" +# Azure dependencies +azure-core = { version = "^1.24.0", optional = true } +azure-identity = { version = "^1.12.0", optional = true } +azure-ai-formrecognizer = { version = "^3.2.0", optional = true } +azure-ai-textanalytics = { version = "^5.3.0", optional = true } [tool.poetry.group.test.dependencies] pytest = "^8.1.1" @@ -71,14 +71,12 @@ groq = "^0.9.0" google-generativeai = "^0.7.2" anthropic = "^0.31.1" - [tool.poetry.group.typing.dependencies] mypy = "^1" types-pyyaml = "^6.0.12.20240311" # for mypy types-tqdm = "^4.66.0.20240417" - -[tool.poetry.group.dev.dependencies] # specify the versions for extras +[tool.poetry.group.dev.dependencies] pre-commit = "^3.7.0" graphviz = "^0.20.3" tensorboard = "^2.17.0" @@ -86,8 +84,7 @@ tensorboardx = "^2.6.2.2" matplotlib = "^3.9.1" diskcache = "^5.6.3" - -[tool.poetry.extras] # allow pip install adalflow[openai, groq] +[tool.poetry.extras] openai = ["openai"] groq = ["groq"] anthropic = ["anthropic"] @@ -98,25 +95,21 @@ faiss-cpu = ["faiss-cpu"] sqlalchemy = ["sqlalchemy"] torch = ["torch"] ollama = ["ollama"] +azure = [ + "azure-core", + "azure-identity", + "azure-ai-formrecognizer", + "azure-ai-textanalytics" +] - -# [[tool.poetry.source]] -# name = "pypi" -# url = "https://pypi.org/simple" -# default = true [[tool.poetry.source]] name = "nvidia-pypi" 
priority = "supplemental" url = "https://pypi.nvidia.com" -# [[tool.poetry.source]] -# name = "nvidia-pypi" -# priority = "supplemental" -# url = "https://pypi.nvidia.com" [tool.ruff] exclude = ["images"] - [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" From 02b7187bd2a03fe372e7fe363d3c223e5f85b20c Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Sun, 8 Sep 2024 06:09:52 +0000 Subject: [PATCH 06/40] fixed doc string --- .../components/model_client/azureai_client.py | 87 +++++++++++++++---- 1 file changed, 69 insertions(+), 18 deletions(-) diff --git a/adalflow/adalflow/components/model_client/azureai_client.py b/adalflow/adalflow/components/model_client/azureai_client.py index be94d07f..70699681 100644 --- a/adalflow/adalflow/components/model_client/azureai_client.py +++ b/adalflow/adalflow/components/model_client/azureai_client.py @@ -98,28 +98,79 @@ def get_probabilities(completion: ChatCompletion) -> List[List[TokenLogProb]]: class AzureAIClient(ModelClient): - __doc__ = r"""A component wrapper for the AzureOpenAI API client. + __doc__ = r""" + A client wrapper for interacting with Azure OpenAI's API. - Support both embedding and chat completion API. + This class provides support for both embedding and chat completion API calls. + Users can use this class to simplify their interactions with Azure OpenAI models + through the `Embedder` and `Generator` components. - Users (1) simplify use ``Embedder`` and ``Generator`` components by passing OpenAIClient() as the model_client. - (2) can use this as an example to create their own API client or extend this class(copying and modifing the code) in their own project. + **Initialization:** - Note: - We suggest users not to use `response_format` to enforce output data type or `tools` and `tool_choice` in your model_kwargs when calling the API. - We do not know how AzureOpenAI is doing the formating or what prompt they have added. - Instead - - use :ref:`OutputParser` for response parsing and formating. + You can initialize the `AzureAIClient` with either an API key or Azure Active Directory (AAD) token + authentication. It is recommended to set environment variables for sensitive data like API keys. Args: - api_key (Optional[str], optional): AzureOpenAI API key. Defaults to None. - chat_completion_parser (Callable[[Completion], Any], optional): A function to parse the chat completion to a str. Defaults to None. - Default is `get_first_message_content`. - - References: - - Embeddings models: https://platform.openai.com/docs/guides/embeddings - - Chat models: https://platform.openai.com/docs/guides/text-generation - - AzureOpenAI docs: https://learn.microsoft.com/en-us/azure/ai-services/openai/overview + api_key (Optional[str]): Azure OpenAI API key. Default is None. + api_version (Optional[str]): API version to use. Default is None. + azure_endpoint (Optional[str]): Azure OpenAI endpoint URL. Default is None. + credential (Optional[DefaultAzureCredential]): Azure AD credential for token-based authentication. Default is None. + chat_completion_parser (Callable[[Completion], Any]): Function to parse chat completions. Default is `get_first_message_content`. + input_type (Literal["text", "messages"]): Format for input, either "text" or "messages". Default is "text". 
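For instance, when requesting ``n > 1`` choices, one of the parser helpers defined at the top of this module can be passed in instead of the default (a sketch; the endpoint and version below are placeholders):

```python
client = AzureAIClient(
    api_version="2023-05-15",
    azure_endpoint="https://your-endpoint.openai.azure.com/",
    chat_completion_parser=get_all_messages_content,  # keep every choice, not just the first
)
```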
+ + **Setup Instructions:** + + - **Using API Key:** + Set up the following environment variables: + ```bash + export AZURE_OPENAI_API_KEY="your_api_key" + export AZURE_OPENAI_ENDPOINT="your_endpoint" + export AZURE_OPENAI_VERSION="your_version" + ``` + + - **Using Azure AD Token:** + Ensure you have configured Azure AD credentials. The `DefaultAzureCredential` will automatically use your configured credentials. + + **Example Usage:**do + + ```python + from azure.identity import DefaultAzureCredential + from your_module import AzureAIClient # Adjust import based on your module name + + # Initialize with API key + client = AzureAIClient( + api_key="your_api_key", + api_version="2023-05-15", + azure_endpoint="https://your-endpoint.openai.azure.com/" + ) + + # Or initialize with Azure AD token + client = AzureAIClient( + api_version="2023-05-15", + azure_endpoint="https://your-endpoint.openai.azure.com/", + credential=DefaultAzureCredential() + ) + + # Example call to the chat completion API + api_kwargs = { + "model": "gpt-3.5-turbo", + "messages": [{"role": "user", "content": "What is the meaning of life?"}], + "stream": True + } + response = client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM) + + for chunk in response: + print(chunk) + ``` + + **Notes:** + - Ensure that the API key or credentials are correctly set up and accessible to avoid authentication errors. + - Use `chat_completion_parser` to define how to extract and handle the chat completion responses. + - The `input_type` parameter determines how input is formatted for the API call. + + **References:** + - [Azure OpenAI API Documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview) + - [OpenAI API Documentation](https://platform.openai.com/docs/guides/text-generation) """ def __init__( @@ -131,7 +182,7 @@ def __init__( chat_completion_parser: Callable[[Completion], Any] = None, input_type: Literal["text", "messages"] = "text", ): - r"""It is recommended to set the OPENAI_API_KEY environment variable instead of passing it as an argument. + r"""It is recommended to set the API_KEY into the environment variable instead of passing it as an argument. Initializes the Azure OpenAI client with either API key or AAD token authentication. 
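The rewritten docstring above only walks through the chat-completion path; the same client also serves embeddings. A hedged sketch built from the methods defined in `azureai_client.py` (the deployment name is a placeholder, and the `AZURE_OPENAI_*` environment variables from the setup instructions are assumed to be set):

```python
from adalflow.core.types import ModelType
from adalflow.components.model_client.azureai_client import AzureAIClient

client = AzureAIClient()  # falls back to AZURE_OPENAI_API_KEY / _ENDPOINT / _VERSION

api_kwargs = client.convert_inputs_to_api_kwargs(
    input=["hello world", "adalflow"],
    model_kwargs={"model": "your-embedding-deployment"},  # placeholder deployment name
    model_type=ModelType.EMBEDDER,
)
raw = client.call(api_kwargs=api_kwargs, model_type=ModelType.EMBEDDER)
embedder_output = client.parse_embedding_response(raw)  # -> EmbedderOutput
```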
From de1fe125fc2bf8f1e100965d786086269235f39d Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Sun, 8 Sep 2024 11:36:06 +0000 Subject: [PATCH 07/40] added azure auth services --- adalflow/adalflow/utils/lazy_import.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/adalflow/adalflow/utils/lazy_import.py b/adalflow/adalflow/utils/lazy_import.py index 9a2b0c1c..4a604543 100644 --- a/adalflow/adalflow/utils/lazy_import.py +++ b/adalflow/adalflow/utils/lazy_import.py @@ -32,6 +32,8 @@ class OptionalPackages(Enum): # modeling library TORCH = ("torch", "Please install torch with: pip install torch") + + AZURE_AUTH_SERVICES = ("AZURE_AUTH_SERVICES", "Please install AZURE_AUTH_SERVICES with: pip install azure-identity azure-core") # search library FAISS = ("faiss", "Please install faiss with: pip install faiss") From 1c4aee0278ad9b05ed2c9d8bb4380efa74bedafc Mon Sep 17 00:00:00 2001 From: adityasugandhi <39901010+adityasugandhi@users.noreply.github.com> Date: Sun, 8 Sep 2024 11:51:34 +0000 Subject: [PATCH 08/40] small bug fix --- pyproject.toml | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 56ab3998..6054c08c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,4 @@ + [tool.poetry] name = "adalflow-project" version = "0.1.0" @@ -16,9 +17,14 @@ packages = [ [tool.poetry.dependencies] python = ">=3.11, <4.0" adalflow = { path = "adalflow", develop = true } +# torch = "^2.3.1" openai = "^1.34.0" +# lightrag = {path = "lightrag/dist/lightrag-0.0.0a11-py3-none-any.whl"} +# lightrag = "^0.0.0a13" + [tool.poetry.group.dev.dependencies] + pre-commit = "^3.7.0" anthropic = "^0.26.0" torchmetrics = "^1.4.0.post0" @@ -37,12 +43,7 @@ textgrad = "^0.1.4" tensorboardx = "^2.6.2.2" tensorboard = "^2.17.0" dspy-ai = "^2.4.13" -transformers = "^4.44.0" -accelerate = "^0.33.0" -[tool.poetry.group.azure.dependencies] -azure-storage-blob = "^12.14.0" -azure-identity = "^1.12.0" # [tool.poetry.group.doc.dependencies] # pydata-sphinx-theme = "^0.15.3" @@ -57,7 +58,10 @@ azure-identity = "^1.12.0" # google-generativeai = "^0.7.1" # faiss-cpu = "^1.8.0.post1" # lightrag = { path = "lightrag", develop = true } +transformers = "^4.44.0" +accelerate = "^0.33.0" + [build-system] requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" +build-backend = "poetry.core.masonry.api" \ No newline at end of file From 9515f30fc3b6b81accf9b9cb4cac46c7421662b5 Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sun, 6 Oct 2024 11:39:47 +0800 Subject: [PATCH 09/40] make boto3 a safe import, update the poetry lock, add better doc string --- .../components/model_client/bedrock_client.py | 115 +++++++++++++----- adalflow/adalflow/utils/lazy_import.py | 4 +- adalflow/poetry.lock | 95 ++++++++------- 3 files changed, 136 insertions(+), 78 deletions(-) diff --git a/adalflow/adalflow/components/model_client/bedrock_client.py b/adalflow/adalflow/components/model_client/bedrock_client.py index 549c1988..2f670d43 100644 --- a/adalflow/adalflow/components/model_client/bedrock_client.py +++ b/adalflow/adalflow/components/model_client/bedrock_client.py @@ -8,6 +8,10 @@ from adalflow.core.model_client import ModelClient from adalflow.core.types import ModelType, CompletionUsage, GeneratorOutput +from adalflow.utils.lazy_import import safe_import, OptionalPackages + +boto3 = safe_import(OptionalPackages.BOTO3.value[0], OptionalPackages.BOTO3.value[1]) + import boto3 from botocore.config import Config @@ 
-15,34 +19,83 @@ bedrock_runtime_exceptions = boto3.client( service_name="bedrock-runtime", - region_name=os.getenv("AWS_REGION_NAME", "us-east-1") + region_name=os.getenv("AWS_REGION_NAME", "us-east-1"), ).exceptions def get_first_message_content(completion: Dict) -> str: r"""When we only need the content of the first message. It is the default parser for chat completion.""" - return completion['output']['message']['content'][0]['text'] + return completion["output"]["message"]["content"][0]["text"] -__all__ = ["BedrockAPIClient", "get_first_message_content", "bedrock_runtime_exceptions"] +__all__ = [ + "BedrockAPIClient", + "get_first_message_content", + "bedrock_runtime_exceptions", +] class BedrockAPIClient(ModelClient): __doc__ = r"""A component wrapper for the Bedrock API client. - Visit https://docs.aws.amazon.com/bedrock/latest/APIReference/welcome.html for more api details. + + Support: + 1. AWS Titan + 2. Claude + 3. Cohere + 4. LLama + 5. Mistral + 6. Jamba + + Setup: + 1. Install boto3: `pip install boto3` + 2. Ensure you have the AWS credentials set up. There are four variables you can optionally set: + - AWS_PROFILE_NAME: The name of the AWS profile to use. + - AWS_REGION_NAME: The name of the AWS region to use. + - AWS_ACCESS_KEY_ID: The AWS access key ID. + - AWS_SECRET_ACCESS_KEY: The AWS secret access key. + + + Relevant API docs: + 1. https://docs.aws.amazon.com/bedrock/latest/APIReference/welcome.html + 2. https://docs.aws.amazon.com/bedrock/latest/userguide/getting-started-api-ex-python.html + 3. https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html#API_runtime_Converse_RequestParameters + + Example: + + .. code-block:: python + + from adalflow.components.model_client import BedrockAPIClient + + template = " + You are a helpful assistant. 
+ + User: {{input_str}} + You: + " + + # use AWS_PROFILE_NAME and AWS_REGION_NAME from the environment variables in this case + self.generator = Generator( + model_client=BedrockAPIClient(), + model_kwargs={ + "modelId": "anthropic.claude-3-sonnet-20240229-v1:0", + "inferenceConfig": { + "temperature": 0.8 + } + }, template=template + ) """ def __init__( - self, - aws_profile_name=None, - aws_region_name=None, - aws_access_key_id=None, - aws_secret_access_key=None, - aws_session_token=None, - aws_connection_timeout=None, - aws_read_timeout=None, - chat_completion_parser: Callable = None, + self, + aws_profile_name=None, + aws_region_name=None, + aws_access_key_id=None, + aws_secret_access_key=None, + aws_session_token=None, + aws_connection_timeout=None, + aws_read_timeout=None, + chat_completion_parser: Callable = None, ): super().__init__() self._aws_profile_name = aws_profile_name @@ -56,7 +109,7 @@ def __init__( self.session = None self.sync_client = self.init_sync_client() self.chat_completion_parser = ( - chat_completion_parser or get_first_message_content + chat_completion_parser or get_first_message_content ) def init_sync_client(self): @@ -67,14 +120,16 @@ def init_sync_client(self): aws_profile_name = self._aws_profile_name or os.getenv("AWS_PROFILE_NAME") aws_region_name = self._aws_region_name or os.getenv("AWS_REGION_NAME") aws_access_key_id = self._aws_access_key_id or os.getenv("AWS_ACCESS_KEY_ID") - aws_secret_access_key = self._aws_secret_access_key or os.getenv("AWS_SECRET_ACCESS_KEY") + aws_secret_access_key = self._aws_secret_access_key or os.getenv( + "AWS_SECRET_ACCESS_KEY" + ) aws_session_token = self._aws_session_token or os.getenv("AWS_SESSION_TOKEN") config = None if self._aws_connection_timeout or self._aws_read_timeout: config = Config( connect_timeout=self._aws_connection_timeout, # Connection timeout in seconds - read_timeout=self._aws_read_timeout # Read timeout in seconds + read_timeout=self._aws_read_timeout, # Read timeout in seconds ) session = boto3.Session( @@ -93,7 +148,7 @@ def init_async_client(self): def parse_chat_completion(self, completion): log.debug(f"completion: {completion}") try: - data = completion['output']['message']['content'][0]['text'] + data = completion["output"]["message"]["content"][0]["text"] usage = self.track_completion_usage(completion) return GeneratorOutput(data=None, usage=usage, raw_response=data) except Exception as e: @@ -104,18 +159,18 @@ def parse_chat_completion(self, completion): def track_completion_usage(self, completion: Dict) -> CompletionUsage: r"""Track the completion usage.""" - usage = completion['usage'] + usage = completion["usage"] return CompletionUsage( - completion_tokens=usage['outputTokens'], - prompt_tokens=usage['inputTokens'], - total_tokens=usage['totalTokens'] + completion_tokens=usage["outputTokens"], + prompt_tokens=usage["inputTokens"], + total_tokens=usage["totalTokens"], ) def convert_inputs_to_api_kwargs( - self, - input: Optional[Any] = None, - model_kwargs: Dict = {}, - model_type: ModelType = ModelType.UNDEFINED + self, + input: Optional[Any] = None, + model_kwargs: Dict = {}, + model_type: ModelType = ModelType.UNDEFINED, ): """ check the converse api doc here: @@ -133,11 +188,11 @@ def convert_inputs_to_api_kwargs( @backoff.on_exception( backoff.expo, ( - bedrock_runtime_exceptions.ThrottlingException, - bedrock_runtime_exceptions.ModelTimeoutException, - bedrock_runtime_exceptions.InternalServerException, - bedrock_runtime_exceptions.ModelErrorException, - 
bedrock_runtime_exceptions.ValidationException + bedrock_runtime_exceptions.ThrottlingException, + bedrock_runtime_exceptions.ModelTimeoutException, + bedrock_runtime_exceptions.InternalServerException, + bedrock_runtime_exceptions.ModelErrorException, + bedrock_runtime_exceptions.ValidationException, ), max_time=5, ) diff --git a/adalflow/adalflow/utils/lazy_import.py b/adalflow/adalflow/utils/lazy_import.py index aa4c988f..ec8244ec 100644 --- a/adalflow/adalflow/utils/lazy_import.py +++ b/adalflow/adalflow/utils/lazy_import.py @@ -29,6 +29,8 @@ class OptionalPackages(Enum): ) COHERE = ("cohere", "Please install cohere with: pip install cohere") OLLAMA = ("ollama", "Please install ollama with: pip install ollama") + # AWS + BOTO3 = ("boto3", "Please install boto3 with: pip install boto3") # modeling library TORCH = ("torch", "Please install torch with: pip install torch") @@ -78,7 +80,7 @@ class LazyImport: """ def __init__( - self, import_path: str, optional_package: OptionalPackages, *args, **kwargs + self, import_path: str, optional_package: OptionalPackages, *args, **kwargs ): if args or kwargs: raise TypeError( diff --git a/adalflow/poetry.lock b/adalflow/poetry.lock index f24854be..ae8445c2 100644 --- a/adalflow/poetry.lock +++ b/adalflow/poetry.lock @@ -251,7 +251,7 @@ files = [ name = "boto3" version = "1.35.19" description = "The AWS SDK for Python" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "boto3-1.35.19-py3-none-any.whl", hash = "sha256:84b3fe1727945bc3cada832d969ddb3dc0d08fce1677064ca8bdc13a89c1a143"}, @@ -2206,12 +2206,13 @@ files = [ [[package]] name = "nvidia-cudnn-cu12" -version = "8.9.2.26" +version = "9.1.0.70" description = "cuDNN runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"}, + {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, + {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, ] [package.dependencies] @@ -2271,12 +2272,13 @@ nvidia-nvjitlink-cu12 = "*" [[package]] name = "nvidia-nccl-cu12" -version = "2.19.3" +version = "2.20.5" description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:a9734707a2c96443331c1e48c717024aa6678a0e2a4cb66b2c364d18cee6b48d"}, + {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, + {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, ] [[package]] @@ -3130,7 +3132,7 @@ pyasn1 = ">=0.1.3" name = "s3transfer" version = "0.10.2" description = "An Amazon S3 Transfer Manager" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, @@ -3521,36 +3523,31 @@ files = [ [[package]] name = "torch" -version = "2.2.2" +version = "2.4.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = false python-versions = ">=3.8.0" 
files = [ - {file = "torch-2.2.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:bc889d311a855dd2dfd164daf8cc903a6b7273a747189cebafdd89106e4ad585"}, - {file = "torch-2.2.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:15dffa4cc3261fa73d02f0ed25f5fa49ecc9e12bf1ae0a4c1e7a88bbfaad9030"}, - {file = "torch-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:11e8fe261233aeabd67696d6b993eeb0896faa175c6b41b9a6c9f0334bdad1c5"}, - {file = "torch-2.2.2-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:b2e2200b245bd9f263a0d41b6a2dab69c4aca635a01b30cca78064b0ef5b109e"}, - {file = "torch-2.2.2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:877b3e6593b5e00b35bbe111b7057464e76a7dd186a287280d941b564b0563c2"}, - {file = "torch-2.2.2-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:ad4c03b786e074f46606f4151c0a1e3740268bcf29fbd2fdf6666d66341c1dcb"}, - {file = "torch-2.2.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:32827fa1fbe5da8851686256b4cd94cc7b11be962862c2293811c94eea9457bf"}, - {file = "torch-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:f9ef0a648310435511e76905f9b89612e45ef2c8b023bee294f5e6f7e73a3e7c"}, - {file = "torch-2.2.2-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:95b9b44f3bcebd8b6cd8d37ec802048c872d9c567ba52c894bba90863a439059"}, - {file = "torch-2.2.2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:49aa4126ede714c5aeef7ae92969b4b0bbe67f19665106463c39f22e0a1860d1"}, - {file = "torch-2.2.2-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:cf12cdb66c9c940227ad647bc9cf5dba7e8640772ae10dfe7569a0c1e2a28aca"}, - {file = "torch-2.2.2-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:89ddac2a8c1fb6569b90890955de0c34e1724f87431cacff4c1979b5f769203c"}, - {file = "torch-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:451331406b760f4b1ab298ddd536486ab3cfb1312614cfe0532133535be60bea"}, - {file = "torch-2.2.2-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:eb4d6e9d3663e26cd27dc3ad266b34445a16b54908e74725adb241aa56987533"}, - {file = "torch-2.2.2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:bf9558da7d2bf7463390b3b2a61a6a3dbb0b45b161ee1dd5ec640bf579d479fc"}, - {file = "torch-2.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd2bf7697c9e95fb5d97cc1d525486d8cf11a084c6af1345c2c2c22a6b0029d0"}, - {file = "torch-2.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b421448d194496e1114d87a8b8d6506bce949544e513742b097e2ab8f7efef32"}, - {file = "torch-2.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:3dbcd563a9b792161640c0cffe17e3270d85e8f4243b1f1ed19cca43d28d235b"}, - {file = "torch-2.2.2-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:31f4310210e7dda49f1fb52b0ec9e59382cfcb938693f6d5378f25b43d7c1d29"}, - {file = "torch-2.2.2-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c795feb7e8ce2e0ef63f75f8e1ab52e7fd5e1a4d7d0c31367ade1e3de35c9e95"}, - {file = "torch-2.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a6e5770d68158d07456bfcb5318b173886f579fdfbf747543901ce718ea94782"}, - {file = "torch-2.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:67dcd726edff108e2cd6c51ff0e416fd260c869904de95750e80051358680d24"}, - {file = "torch-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:539d5ef6c4ce15bd3bd47a7b4a6e7c10d49d4d21c0baaa87c7d2ef8698632dfb"}, - {file = "torch-2.2.2-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:dff696de90d6f6d1e8200e9892861fd4677306d0ef604cb18f2134186f719f82"}, - {file = "torch-2.2.2-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:3a4dd910663fd7a124c056c878a52c2b0be4a5a424188058fe97109d4436ee42"}, + {file = 
"torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971"}, + {file = "torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3"}, + {file = "torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada"}, + {file = "torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd"}, + {file = "torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113"}, + {file = "torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8"}, + {file = "torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c"}, + {file = "torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea"}, + {file = "torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042"}, + {file = "torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d"}, + {file = "torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c"}, + {file = "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d"}, + {file = "torch-2.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c99e1db4bf0c5347107845d715b4aa1097e601bdc36343d758963055e9599d93"}, + {file = "torch-2.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b57f07e92858db78c5b72857b4f0b33a65b00dc5d68e7948a8494b0314efb880"}, + {file = "torch-2.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:f18197f3f7c15cde2115892b64f17c80dbf01ed72b008020e7da339902742cf6"}, + {file = "torch-2.4.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:5fc1d4d7ed265ef853579caf272686d1ed87cebdcd04f2a498f800ffc53dab71"}, + {file = "torch-2.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:40f6d3fe3bae74efcf08cb7f8295eaddd8a838ce89e9d26929d4edd6d5e4329d"}, + {file = "torch-2.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c9299c16c9743001ecef515536ac45900247f4338ecdf70746f2461f9e4831db"}, + {file = "torch-2.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:6bce130f2cd2d52ba4e2c6ada461808de7e5eccbac692525337cfb4c19421846"}, + {file = "torch-2.4.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:a38de2803ee6050309aac032676536c3d3b6a9804248537e38e098d0e14817ec"}, ] [package.dependencies] @@ -3562,20 +3559,21 @@ nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linu nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} 
nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +setuptools = "*" sympy = "*" -triton = {version = "2.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""} +triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} typing-extensions = ">=4.8.0" [package.extras] opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.9.1)"] +optree = ["optree (>=0.11.0)"] [[package]] name = "tqdm" @@ -3599,17 +3597,21 @@ telegram = ["requests"] [[package]] name = "triton" -version = "2.2.0" +version = "3.0.0" description = "A language and compiler for custom Deep Learning operations" optional = false python-versions = "*" files = [ - {file = "triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2294514340cfe4e8f4f9e5c66c702744c4a117d25e618bd08469d0bfed1e2e5"}, - {file = "triton-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da58a152bddb62cafa9a857dd2bc1f886dbf9f9c90a2b5da82157cd2b34392b0"}, - {file = "triton-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af58716e721460a61886668b205963dc4d1e4ac20508cc3f623aef0d70283d5"}, - {file = "triton-2.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8fe46d3ab94a8103e291bd44c741cc294b91d1d81c1a2888254cbf7ff846dab"}, - {file = "triton-2.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ce26093e539d727e7cf6f6f0d932b1ab0574dc02567e684377630d86723ace"}, - {file = "triton-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:227cc6f357c5efcb357f3867ac2a8e7ecea2298cd4606a8ba1e931d1d5a947df"}, + {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"}, + {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"}, + {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"}, + {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"}, + {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"}, + {file = "triton-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:39b052da883351fdf6be3d93cedae6db3b8e3988d3b09ed221bccecfa9612230"}, + {file = "triton-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd34f19a8582af96e6291d4afce25dac08cb2a5d218c599163761e8e0827208e"}, + {file = "triton-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d5e10de8c011adeb7c878c6ce0dd6073b14367749e34467f1cff2bde1b78253"}, + {file = "triton-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8903767951bf86ec960b4fe4e21bc970055afc65e9d57e916d79ae3c93665e3"}, + {file = "triton-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41004fb1ae9a53fcb3e970745feb87f0e3c94c6ce1ba86e95fa3b8537894bef7"}, ] [package.dependencies] @@ -3617,8 +3619,8 @@ filelock = "*" [package.extras] build = ["cmake (>=3.20)", "lit"] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"] -tutorials = ["matplotlib", "pandas", "tabulate", "torch"] +tests = ["autopep8", "flake8", "isort", "llnl-hatchet", "numpy", "pytest", "scipy (>=1.7.1)"] +tutorials = ["matplotlib", "pandas", "tabulate"] [[package]] name = "types-pyyaml" @@ -4056,5 +4058,4 @@ torch = ["torch"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" - -content-hash = "04e8c5a6806fa7e3a34f322dd63ba242cef635929360f57d126082edd1d9987c" +content-hash = "9c7a0522920a4f485ebce56c7f5b7da0674523933e1ca9f167fa26650423da2a" From d0281cfcbac17798b865c43130ed8976d05f99f0 Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sun, 6 Oct 2024 14:56:34 +0800 Subject: [PATCH 10/40] update --- .../components/model_client/__init__.py | 2 +- .../components/model_client/azureai_client.py | 136 ++++--- adalflow/adalflow/utils/lazy_import.py | 73 +++- adalflow/poetry.lock | 373 +++++++++++++++--- adalflow/pyproject.toml | 16 +- adalflow/tests/test_AzureClient.py | 174 ++++---- adalflow/tests/test_lazy_import.py | 109 ++++- docs/poetry.lock | 148 ++++++- docs/pyproject.toml | 7 +- 9 files changed, 837 insertions(+), 201 deletions(-) diff --git a/adalflow/adalflow/components/model_client/__init__.py b/adalflow/adalflow/components/model_client/__init__.py index 64db136f..ae508ece 100644 --- a/adalflow/adalflow/components/model_client/__init__.py +++ b/adalflow/adalflow/components/model_client/__init__.py @@ -34,7 +34,7 @@ ) BedrockAPIClient = LazyImport( "adalflow.components.model_client.bedrock_client.BedrockAPIClient", - OptionalPackages.BEDROCK, + OptionalPackages.BOTO3, ) GroqAPIClient = LazyImport( "adalflow.components.model_client.groq_client.GroqAPIClient", diff --git a/adalflow/adalflow/components/model_client/azureai_client.py b/adalflow/adalflow/components/model_client/azureai_client.py index 70699681..28ef0e05 100644 --- a/adalflow/adalflow/components/model_client/azureai_client.py +++ b/adalflow/adalflow/components/model_client/azureai_client.py @@ -21,10 +21,23 @@ # optional import from adalflow.utils.lazy_import import safe_import, OptionalPackages +import sys openai = safe_import(OptionalPackages.OPENAI.value[0], OptionalPackages.OPENAI.value[1]) +# Importing all Azure packages together +azure_modules = safe_import( + OptionalPackages.AZURE.value[0], # List of package names + OptionalPackages.AZURE.value[1], # Error message +) +# Manually add each module to sys.modules to make them available globally as if imported normally +azure_module_names = OptionalPackages.AZURE.value[0] +for name, module in zip(azure_module_names, azure_modules): + sys.modules[name] = module + +# Use the modules as if 
they were imported normally from azure.identity import DefaultAzureCredential, get_bearer_token_provider -from azure.core.credentials import AccessToken + +# from azure.core.credentials import AccessToken from openai import AzureOpenAI, AsyncAzureOpenAI, Stream from openai import ( APITimeoutError, @@ -53,6 +66,11 @@ T = TypeVar("T") +__all__ = ["AzureAIClient"] + +# TODO: this overlaps with openai client largely, might need to refactor to subclass openai client to simplify the code + + # completion parsing functions and you can combine them into one singple chat completion parser def get_first_message_content(completion: ChatCompletion) -> str: r"""When we only need the content of the first message. @@ -102,12 +120,12 @@ class AzureAIClient(ModelClient): A client wrapper for interacting with Azure OpenAI's API. This class provides support for both embedding and chat completion API calls. - Users can use this class to simplify their interactions with Azure OpenAI models + Users can use this class to simplify their interactions with Azure OpenAI models through the `Embedder` and `Generator` components. **Initialization:** - You can initialize the `AzureAIClient` with either an API key or Azure Active Directory (AAD) token + You can initialize the `AzureAIClient` with either an API key or Azure Active Directory (AAD) token authentication. It is recommended to set environment variables for sensitive data like API keys. Args: @@ -131,37 +149,38 @@ class AzureAIClient(ModelClient): - **Using Azure AD Token:** Ensure you have configured Azure AD credentials. The `DefaultAzureCredential` will automatically use your configured credentials. - **Example Usage:**do + **Example Usage:** - ```python - from azure.identity import DefaultAzureCredential - from your_module import AzureAIClient # Adjust import based on your module name + .. code-block:: python - # Initialize with API key - client = AzureAIClient( - api_key="your_api_key", - api_version="2023-05-15", - azure_endpoint="https://your-endpoint.openai.azure.com/" - ) + from azure.identity import DefaultAzureCredential + from your_module import AzureAIClient # Adjust import based on your module name - # Or initialize with Azure AD token - client = AzureAIClient( - api_version="2023-05-15", - azure_endpoint="https://your-endpoint.openai.azure.com/", - credential=DefaultAzureCredential() - ) + # Initialize with API key + client = AzureAIClient( + api_key="your_api_key", + api_version="2023-05-15", + azure_endpoint="https://your-endpoint.openai.azure.com/" + ) + + # Or initialize with Azure AD token + client = AzureAIClient( + api_version="2023-05-15", + azure_endpoint="https://your-endpoint.openai.azure.com/", + credential=DefaultAzureCredential() + ) + + # Example call to the chat completion API + api_kwargs = { + "model": "gpt-3.5-turbo", + "messages": [{"role": "user", "content": "What is the meaning of life?"}], + "stream": True + } + response = client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM) - # Example call to the chat completion API - api_kwargs = { - "model": "gpt-3.5-turbo", - "messages": [{"role": "user", "content": "What is the meaning of life?"}], - "stream": True - } - response = client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM) + for chunk in response: + print(chunk) - for chunk in response: - print(chunk) - ``` **Notes:** - Ensure that the API key or credentials are correctly set up and accessible to avoid authentication errors. 
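The docstring hunk above only demonstrates the chat-completion path; the class also states that it supports the embedding API. The following is a minimal sketch for illustration only, not part of the patch: the deployment name "text-embedding-ada-002" is a hypothetical placeholder, and it assumes the AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, and AZURE_OPENAI_VERSION environment variables are set as described in the class docstring.

from adalflow.core.types import ModelType
from adalflow.components.model_client.azureai_client import AzureAIClient

# With no arguments, the client falls back to the AZURE_OPENAI_* environment variables.
client = AzureAIClient()

embed_kwargs = {
    "model": "text-embedding-ada-002",  # hypothetical Azure deployment name; substitute your own
    "input": ["AdalFlow is a library for building LLM applications."],
}

# ModelType.EMBEDDER routes the call to the embeddings endpoint instead of chat completions.
response = client.call(api_kwargs=embed_kwargs, model_type=ModelType.EMBEDDER)
output = client.parse_embedding_response(response)  # EmbedderOutput with .data / .error
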
@@ -176,17 +195,17 @@ class AzureAIClient(ModelClient): def __init__( self, api_key: Optional[str] = None, - api_version:Optional[str]=None, - azure_endpoint: Optional[str]= None, + api_version: Optional[str] = None, + azure_endpoint: Optional[str] = None, credential: Optional[DefaultAzureCredential] = None, chat_completion_parser: Callable[[Completion], Any] = None, input_type: Literal["text", "messages"] = "text", ): r"""It is recommended to set the API_KEY into the environment variable instead of passing it as an argument. - + Initializes the Azure OpenAI client with either API key or AAD token authentication. - + Args: api_key: Azure OpenAI API key. api_version: Azure OpenAI API version. @@ -194,14 +213,14 @@ def __init__( credential: Azure AD credential for token-based authentication. chat_completion_parser: Function to parse chat completions. input_type: Input format, either "text" or "messages". - + """ super().__init__() # added api_type azure for azure Ai self.api_type = "azure" self._api_key = api_key - self._apiversion= api_version + self._apiversion = api_version self._azure_endpoint = azure_endpoint self._credential = credential self.sync_client = self.init_sync_client() @@ -215,40 +234,59 @@ def init_sync_client(self): api_key = self._api_key or os.getenv("AZURE_OPENAI_API_KEY") azure_endpoint = self._azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT") api_version = self._apiversion or os.getenv("AZURE_OPENAI_VERSION") - credential = self._credential or DefaultAzureCredential + # credential = self._credential or DefaultAzureCredential if not azure_endpoint: raise ValueError("Environment variable AZURE_OPENAI_ENDPOINT must be set") if not api_version: raise ValueError("Environment variable AZURE_OPENAI_VERSION must be set") - + if api_key: - return AzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version) + return AzureOpenAI( + api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version + ) elif self._credential: # credential = DefaultAzureCredential() - token_provider = get_bearer_token_provider(DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default") - return AzureOpenAI(azure_ad_token_provider=token_provider, azure_endpoint=azure_endpoint, api_version=api_version) + token_provider = get_bearer_token_provider( + DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default" + ) + return AzureOpenAI( + azure_ad_token_provider=token_provider, + azure_endpoint=azure_endpoint, + api_version=api_version, + ) else: - raise ValueError("Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided") - + raise ValueError( + "Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided" + ) def init_async_client(self): api_key = self._api_key or os.getenv("AZURE_OPENAI_API_KEY") azure_endpoint = self._azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT") api_version = self._apiversion or os.getenv("AZURE_OPENAI_VERSION") - credential = self._credential or DefaultAzureCredential() + # credential = self._credential or DefaultAzureCredential() if not azure_endpoint: - raise ValueError("Environment variable AZURE_OPENAI_ENDPOINT must be set") + raise ValueError("Environment variable AZURE_OPENAI_ENDPOINT must be set") if not api_version: raise ValueError("Environment variable AZURE_OPENAI_VERSION must be set") - + if api_key: - return AsyncAzureOpenAI(api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version) + return AsyncAzureOpenAI( + api_key=api_key, 
azure_endpoint=azure_endpoint, api_version=api_version + ) elif self._credential: # credential = DefaultAzureCredential() - token_provider = get_bearer_token_provider(DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default") - return AsyncAzureOpenAI(azure_ad_token_provider=token_provider, azure_endpoint=azure_endpoint, api_version=api_version) + token_provider = get_bearer_token_provider( + DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default" + ) + return AsyncAzureOpenAI( + azure_ad_token_provider=token_provider, + azure_endpoint=azure_endpoint, + api_version=api_version, + ) else: - raise ValueError("Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided") + raise ValueError( + "Environment variable AZURE_OPENAI_API_KEY must be set or credential must be provided" + ) # def _parse_chat_completion(self, completion: ChatCompletion) -> "GeneratorOutput": # # TODO: raw output it is better to save the whole completion as a source of truth instead of just the message diff --git a/adalflow/adalflow/utils/lazy_import.py b/adalflow/adalflow/utils/lazy_import.py index fee070c5..9dac147c 100644 --- a/adalflow/adalflow/utils/lazy_import.py +++ b/adalflow/adalflow/utils/lazy_import.py @@ -1,5 +1,6 @@ """Lazy import a module and class.""" +from typing import List, Union import importlib import logging from types import ModuleType @@ -13,12 +14,36 @@ class OptionalPackages(Enum): __doc__ = r"""Enum for optional packages that can be used in the library. The package name and error message are defined for each optional package as a tuple. + + The value of the tuple: + - The package name (str): The package name to import. Follows the right syntax: such as import azure.identity but the package itself is azure-identity. + Support a list of package names for related packages. This will be importing a list of packages while safe_import is used. + - The error message (str): The message to display if the package is not found. + + Example of using multiple related packages: + + .. 
code-block:: python + + from adalflow.utils.lazy_import import safe_import, OptionalPackages + import sys + + azure_modules = safe_import( + OptionalPackages.AZURE.value[0], # List of package names + OptionalPackages.AZURE.value[1], # Error message + ) + # Manually add each module to sys.modules to make them available globally as if imported normally + azure_module_names = OptionalPackages.AZURE.value[0] + for name, module in zip(azure_module_names, azure_modules): + sys.modules[name] = module + + # Use the modules as if they were imported normally + from azure.identity import DefaultAzureCredential, get_bearer_token_provider """ # model sdk GROQ = ("groq", "Please install groq with: pip install groq") OPENAI = ("openai", "Please install openai with: pip install openai") ANTHROPIC = ("anthropic", "Please install anthropic with: pip install anthropic") - BEDROCK = ("bedrock", "Please install boto3 with: pip install boto3") + BOTO3 = ("boto3", "Please install boto3 with: pip install boto3") GOOGLE_GENERATIVEAI = ( "google.generativeai", "Please install google-generativeai with: pip install google-generativeai", @@ -32,8 +57,16 @@ class OptionalPackages(Enum): # modeling library TORCH = ("torch", "Please install torch with: pip install torch") - - AZURE_AUTH_SERVICES = ("AZURE_AUTH_SERVICES", "Please install AZURE_AUTH_SERVICES with: pip install azure-identity azure-core") + # Grouping all Azure-related packages under one entry + AZURE = ( + [ + "azure.identity", + "azure.core", + # "azure.ai-formrecognizer", + # "azure.ai-textanalytics", + ], + "Please install Azure packages with: pip install azure-identity azure-core azure-ai-formrecognizer azure-ai-textanalytics", + ) # search library FAISS = ( "faiss", @@ -80,7 +113,7 @@ class LazyImport: """ def __init__( - self, import_path: str, optional_package: OptionalPackages, *args, **kwargs + self, import_path: str, optional_package: OptionalPackages, *args, **kwargs ): if args or kwargs: raise TypeError( @@ -123,10 +156,23 @@ def __call__(self, *args, **kwargs): return self.class_(*args, **kwargs) -def safe_import(module_name: str, install_message: str) -> ModuleType: +def safe_import( + module_names: Union[str, List[str]], install_message: str +) -> Union[ModuleType, List[ModuleType]]: """Safely import a module and raise an ImportError with the install message if the module is not found. - Mainly used internally to import optional packages only when needed. + Handles importing of multiple related packages. + + Args: + module_names (list or str): The package name(s) to import. + install_message (str): The message to display if import fails. + + Returns: + ModuleType: The imported module. + + Raises: + ImportError: If any of the packages are not found. 
+ Example: @@ -152,7 +198,14 @@ def safe_import(module_name: str, install_message: str) -> ModuleType: numpy = safe_import(OptionalPackages.NUMPY.value[0], OptionalPackages.NUMPY.value[1]) """ - try: - return importlib.import_module(module_name) - except ImportError: - raise ImportError(f"{install_message}") + if isinstance(module_names, str): + module_names = [module_names] + + return_modules = [] + for module_name in module_names: + try: + return_modules.append(importlib.import_module(module_name)) + except ImportError: + raise ImportError(f"{install_message}") + + return return_modules[0] if len(return_modules) == 1 else return_modules diff --git a/adalflow/poetry.lock b/adalflow/poetry.lock index f24854be..2d3f82cf 100644 --- a/adalflow/poetry.lock +++ b/adalflow/poetry.lock @@ -236,6 +236,43 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "azure-core" +version = "1.31.0" +description = "Microsoft Azure Core Library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_core-1.31.0-py3-none-any.whl", hash = "sha256:22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd"}, + {file = "azure_core-1.31.0.tar.gz", hash = "sha256:656a0dd61e1869b1506b7c6a3b31d62f15984b1a573d6326f6aa2f3e4123284b"}, +] + +[package.dependencies] +requests = ">=2.21.0" +six = ">=1.11.0" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["aiohttp (>=3.0)"] + +[[package]] +name = "azure-identity" +version = "1.18.0" +description = "Microsoft Azure Identity Library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_identity-1.18.0-py3-none-any.whl", hash = "sha256:bccf6106245b49ff41d0c4cd7b72851c5a2ba3a32cef7589da246f5727f26f02"}, + {file = "azure_identity-1.18.0.tar.gz", hash = "sha256:f567579a65d8932fa913c76eddf3305101a15e5727a5e4aa5df649a0f553d4c3"}, +] + +[package.dependencies] +azure-core = ">=1.31.0" +cryptography = ">=2.5" +msal = ">=1.30.0" +msal-extensions = ">=1.2.0" +typing-extensions = ">=4.0.0" + [[package]] name = "backoff" version = "2.2.1" @@ -251,7 +288,7 @@ files = [ name = "boto3" version = "1.35.19" description = "The AWS SDK for Python" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "boto3-1.35.19-py3-none-any.whl", hash = "sha256:84b3fe1727945bc3cada832d969ddb3dc0d08fce1677064ca8bdc13a89c1a143"}, @@ -310,6 +347,85 @@ files = [ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash 
= "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", 
hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.4.0" @@ -539,6 +655,55 @@ mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pil test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] 
test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] +[[package]] +name = "cryptography" +version = "43.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "cycler" version = "0.12.1" @@ -1889,6 +2054,40 @@ docs = ["sphinx"] gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] +[[package]] +name = "msal" +version = "1.31.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." +optional = false +python-versions = ">=3.7" +files = [ + {file = "msal-1.31.0-py3-none-any.whl", hash = "sha256:96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7"}, + {file = "msal-1.31.0.tar.gz", hash = "sha256:2c4f189cf9cc8f00c80045f66d39b7c0f3ed45873fd3d1f2af9f22db2e12ff4b"}, +] + +[package.dependencies] +cryptography = ">=2.5,<46" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[package.extras] +broker = ["pymsalruntime (>=0.14,<0.18)", "pymsalruntime (>=0.17,<0.18)"] + +[[package]] +name = "msal-extensions" +version = "1.2.0" +description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, + {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, +] + +[package.dependencies] +msal = ">=1.29,<2" +portalocker = ">=1.4,<3" + [[package]] name = "multidict" version = "6.1.0" @@ -2206,12 +2405,13 @@ files = [ [[package]] name = "nvidia-cudnn-cu12" -version = "8.9.2.26" +version = "9.1.0.70" description = "cuDNN runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"}, + {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, + {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, ] [package.dependencies] @@ -2271,12 +2471,13 @@ nvidia-nvjitlink-cu12 = "*" [[package]] name = "nvidia-nccl-cu12" -version = "2.19.3" +version = "2.20.5" description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:a9734707a2c96443331c1e48c717024aa6678a0e2a4cb66b2c364d18cee6b48d"}, + {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, + {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, ] [[package]] @@ -2579,6 +2780,25 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "portalocker" +version = "2.10.1" +description = "Wraps the portalocker recipe for easy usage" +optional = false +python-versions = ">=3.8" +files = [ + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + [[package]] name = "pre-commit" version = "3.8.0" @@ -2710,6 +2930,17 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.7.0" +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" version = "2.9.1" @@ -2834,6 +3065,26 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.8" +files = [ + 
{file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pyparsing" version = "3.1.4" @@ -2926,6 +3177,33 @@ files = [ {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] +[[package]] +name = "pywin32" +version = "307" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-307-cp310-cp310-win32.whl", hash = "sha256:f8f25d893c1e1ce2d685ef6d0a481e87c6f510d0f3f117932781f412e0eba31b"}, + {file = "pywin32-307-cp310-cp310-win_amd64.whl", hash = "sha256:36e650c5e5e6b29b5d317385b02d20803ddbac5d1031e1f88d20d76676dd103d"}, + {file = "pywin32-307-cp310-cp310-win_arm64.whl", hash = "sha256:0c12d61e0274e0c62acee79e3e503c312426ddd0e8d4899c626cddc1cafe0ff4"}, + {file = "pywin32-307-cp311-cp311-win32.whl", hash = "sha256:fec5d27cc893178fab299de911b8e4d12c5954e1baf83e8a664311e56a272b75"}, + {file = "pywin32-307-cp311-cp311-win_amd64.whl", hash = "sha256:987a86971753ed7fdd52a7fb5747aba955b2c7fbbc3d8b76ec850358c1cc28c3"}, + {file = "pywin32-307-cp311-cp311-win_arm64.whl", hash = "sha256:fd436897c186a2e693cd0437386ed79f989f4d13d6f353f8787ecbb0ae719398"}, + {file = "pywin32-307-cp312-cp312-win32.whl", hash = "sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815"}, + {file = "pywin32-307-cp312-cp312-win_amd64.whl", hash = "sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347"}, + {file = "pywin32-307-cp312-cp312-win_arm64.whl", hash = "sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2"}, + {file = "pywin32-307-cp313-cp313-win32.whl", hash = "sha256:ea4d56e48dc1ab2aa0a5e3c0741ad6e926529510516db7a3b6981a1ae74405e5"}, + {file = "pywin32-307-cp313-cp313-win_amd64.whl", hash = "sha256:576d09813eaf4c8168d0bfd66fb7cb3b15a61041cf41598c2db4a4583bf832d2"}, + {file = "pywin32-307-cp313-cp313-win_arm64.whl", hash = "sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a"}, + {file = "pywin32-307-cp37-cp37m-win32.whl", hash = "sha256:5101472f5180c647d4525a0ed289ec723a26231550dbfd369ec19d5faf60e511"}, + {file = "pywin32-307-cp37-cp37m-win_amd64.whl", hash = "sha256:05de55a7c110478dc4b202230e98af5e0720855360d2b31a44bb4e296d795fba"}, + {file = "pywin32-307-cp38-cp38-win32.whl", hash = "sha256:13d059fb7f10792542082f5731d5d3d9645320fc38814759313e5ee97c3fac01"}, + {file = "pywin32-307-cp38-cp38-win_amd64.whl", hash = "sha256:7e0b2f93769d450a98ac7a31a087e07b126b6d571e8b4386a5762eb85325270b"}, + {file = "pywin32-307-cp39-cp39-win32.whl", hash = "sha256:55ee87f2f8c294e72ad9d4261ca423022310a6e79fb314a8ca76ab3f493854c6"}, + {file = "pywin32-307-cp39-cp39-win_amd64.whl", hash = "sha256:e9d5202922e74985b037c9ef46778335c102b74b95cec70f629453dbe7235d87"}, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -3130,7 +3408,7 @@ pyasn1 = ">=0.1.3" name = "s3transfer" version = "0.10.2" description = "An 
Amazon S3 Transfer Manager" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, @@ -3521,36 +3799,31 @@ files = [ [[package]] name = "torch" -version = "2.2.2" +version = "2.4.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = false python-versions = ">=3.8.0" files = [ - {file = "torch-2.2.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:bc889d311a855dd2dfd164daf8cc903a6b7273a747189cebafdd89106e4ad585"}, - {file = "torch-2.2.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:15dffa4cc3261fa73d02f0ed25f5fa49ecc9e12bf1ae0a4c1e7a88bbfaad9030"}, - {file = "torch-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:11e8fe261233aeabd67696d6b993eeb0896faa175c6b41b9a6c9f0334bdad1c5"}, - {file = "torch-2.2.2-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:b2e2200b245bd9f263a0d41b6a2dab69c4aca635a01b30cca78064b0ef5b109e"}, - {file = "torch-2.2.2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:877b3e6593b5e00b35bbe111b7057464e76a7dd186a287280d941b564b0563c2"}, - {file = "torch-2.2.2-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:ad4c03b786e074f46606f4151c0a1e3740268bcf29fbd2fdf6666d66341c1dcb"}, - {file = "torch-2.2.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:32827fa1fbe5da8851686256b4cd94cc7b11be962862c2293811c94eea9457bf"}, - {file = "torch-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:f9ef0a648310435511e76905f9b89612e45ef2c8b023bee294f5e6f7e73a3e7c"}, - {file = "torch-2.2.2-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:95b9b44f3bcebd8b6cd8d37ec802048c872d9c567ba52c894bba90863a439059"}, - {file = "torch-2.2.2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:49aa4126ede714c5aeef7ae92969b4b0bbe67f19665106463c39f22e0a1860d1"}, - {file = "torch-2.2.2-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:cf12cdb66c9c940227ad647bc9cf5dba7e8640772ae10dfe7569a0c1e2a28aca"}, - {file = "torch-2.2.2-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:89ddac2a8c1fb6569b90890955de0c34e1724f87431cacff4c1979b5f769203c"}, - {file = "torch-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:451331406b760f4b1ab298ddd536486ab3cfb1312614cfe0532133535be60bea"}, - {file = "torch-2.2.2-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:eb4d6e9d3663e26cd27dc3ad266b34445a16b54908e74725adb241aa56987533"}, - {file = "torch-2.2.2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:bf9558da7d2bf7463390b3b2a61a6a3dbb0b45b161ee1dd5ec640bf579d479fc"}, - {file = "torch-2.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd2bf7697c9e95fb5d97cc1d525486d8cf11a084c6af1345c2c2c22a6b0029d0"}, - {file = "torch-2.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b421448d194496e1114d87a8b8d6506bce949544e513742b097e2ab8f7efef32"}, - {file = "torch-2.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:3dbcd563a9b792161640c0cffe17e3270d85e8f4243b1f1ed19cca43d28d235b"}, - {file = "torch-2.2.2-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:31f4310210e7dda49f1fb52b0ec9e59382cfcb938693f6d5378f25b43d7c1d29"}, - {file = "torch-2.2.2-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c795feb7e8ce2e0ef63f75f8e1ab52e7fd5e1a4d7d0c31367ade1e3de35c9e95"}, - {file = "torch-2.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a6e5770d68158d07456bfcb5318b173886f579fdfbf747543901ce718ea94782"}, - {file = "torch-2.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:67dcd726edff108e2cd6c51ff0e416fd260c869904de95750e80051358680d24"}, - {file = "torch-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:539d5ef6c4ce15bd3bd47a7b4a6e7c10d49d4d21c0baaa87c7d2ef8698632dfb"}, - {file = "torch-2.2.2-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:dff696de90d6f6d1e8200e9892861fd4677306d0ef604cb18f2134186f719f82"}, - {file = "torch-2.2.2-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:3a4dd910663fd7a124c056c878a52c2b0be4a5a424188058fe97109d4436ee42"}, + {file = "torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971"}, + {file = "torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3"}, + {file = "torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada"}, + {file = "torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd"}, + {file = "torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113"}, + {file = "torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8"}, + {file = "torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c"}, + {file = "torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea"}, + {file = "torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042"}, + {file = "torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d"}, + {file = "torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c"}, + {file = "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d"}, + {file = "torch-2.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c99e1db4bf0c5347107845d715b4aa1097e601bdc36343d758963055e9599d93"}, + {file = "torch-2.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b57f07e92858db78c5b72857b4f0b33a65b00dc5d68e7948a8494b0314efb880"}, + {file = "torch-2.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:f18197f3f7c15cde2115892b64f17c80dbf01ed72b008020e7da339902742cf6"}, + {file = "torch-2.4.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:5fc1d4d7ed265ef853579caf272686d1ed87cebdcd04f2a498f800ffc53dab71"}, + {file = "torch-2.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:40f6d3fe3bae74efcf08cb7f8295eaddd8a838ce89e9d26929d4edd6d5e4329d"}, + {file = "torch-2.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c9299c16c9743001ecef515536ac45900247f4338ecdf70746f2461f9e4831db"}, + {file = "torch-2.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:6bce130f2cd2d52ba4e2c6ada461808de7e5eccbac692525337cfb4c19421846"}, + {file = "torch-2.4.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:a38de2803ee6050309aac032676536c3d3b6a9804248537e38e098d0e14817ec"}, ] [package.dependencies] @@ -3562,20 +3835,21 @@ nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linu nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} 
nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +setuptools = "*" sympy = "*" -triton = {version = "2.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""} +triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} typing-extensions = ">=4.8.0" [package.extras] opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.9.1)"] +optree = ["optree (>=0.11.0)"] [[package]] name = "tqdm" @@ -3599,17 +3873,21 @@ telegram = ["requests"] [[package]] name = "triton" -version = "2.2.0" +version = "3.0.0" description = "A language and compiler for custom Deep Learning operations" optional = false python-versions = "*" files = [ - {file = "triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2294514340cfe4e8f4f9e5c66c702744c4a117d25e618bd08469d0bfed1e2e5"}, - {file = "triton-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da58a152bddb62cafa9a857dd2bc1f886dbf9f9c90a2b5da82157cd2b34392b0"}, - {file = "triton-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af58716e721460a61886668b205963dc4d1e4ac20508cc3f623aef0d70283d5"}, - {file = "triton-2.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8fe46d3ab94a8103e291bd44c741cc294b91d1d81c1a2888254cbf7ff846dab"}, - {file = "triton-2.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ce26093e539d727e7cf6f6f0d932b1ab0574dc02567e684377630d86723ace"}, - {file = "triton-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:227cc6f357c5efcb357f3867ac2a8e7ecea2298cd4606a8ba1e931d1d5a947df"}, + {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"}, + {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"}, + {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"}, + {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"}, + {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"}, + {file = "triton-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b052da883351fdf6be3d93cedae6db3b8e3988d3b09ed221bccecfa9612230"}, + {file = "triton-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd34f19a8582af96e6291d4afce25dac08cb2a5d218c599163761e8e0827208e"}, + {file = "triton-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d5e10de8c011adeb7c878c6ce0dd6073b14367749e34467f1cff2bde1b78253"}, + {file = "triton-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8903767951bf86ec960b4fe4e21bc970055afc65e9d57e916d79ae3c93665e3"}, + {file = "triton-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41004fb1ae9a53fcb3e970745feb87f0e3c94c6ce1ba86e95fa3b8537894bef7"}, ] [package.dependencies] @@ -3617,8 +3895,8 @@ filelock = "*" [package.extras] build = ["cmake (>=3.20)", "lit"] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"] -tutorials = ["matplotlib", "pandas", "tabulate", "torch"] +tests = ["autopep8", "flake8", "isort", "llnl-hatchet", "numpy", "pytest", "scipy (>=1.7.1)"] +tutorials = ["matplotlib", "pandas", "tabulate"] [[package]] name = "types-pyyaml" @@ -4042,6 +4320,8 @@ type = ["pytest-mypy"] [extras] anthropic = ["anthropic"] +azure = ["azure-core", "azure-identity"] +bedrock = ["boto3"] cohere = ["cohere"] datasets = [] faiss-cpu = ["faiss-cpu"] @@ -4056,5 +4336,4 @@ torch = ["torch"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" - -content-hash = "04e8c5a6806fa7e3a34f322dd63ba242cef635929360f57d126082edd1d9987c" +content-hash = "0a58b82476e4794adbc5768d38911e1935a9c3941cc40499001199383fc8c8ec" diff --git a/adalflow/pyproject.toml b/adalflow/pyproject.toml index 455d36cc..218c7b25 100644 --- a/adalflow/pyproject.toml +++ b/adalflow/pyproject.toml @@ -42,7 +42,7 @@ tqdm = "^4.66.4" pyyaml = "^6.0.1" nest-asyncio = "^1.6.0" colorama = "^0.4.6" -botocore = "^1.34.149" # if not specified, it will fail to resolve the dependency, we need to figure out a way to get rid of botocore dependency +botocore = "^1.34.149" # if not specified, it will fail to resolve the dependency, we need to figure out a way to get rid of botocore dependency diskcache = "^5.6.3" @@ -62,8 +62,10 @@ ollama = { version = "^0.2.1", optional = true } # Azure dependencies azure-core = { version = "^1.24.0", optional = true } azure-identity = { version = "^1.12.0", optional = true } -azure-ai-formrecognizer = { version = "^3.2.0", optional = true } -azure-ai-textanalytics = { version = "^5.3.0", optional = true } +# azure-ai-formrecognizer = { version = "^3.2.0", optional = true } +# azure-ai-textanalytics = { version = "^5.3.0", optional = true } + +# amazon bedrock boto3 = { version = "^1.35.19", optional = true } [tool.poetry.group.test.dependencies] @@ -88,7 +90,8 @@ graphviz = "^0.20.3" tensorboard = "^2.17.0" tensorboardx = "^2.6.2.2" matplotlib = "^3.9.1" - +azure-identity = "^1.18.0" +azure-core = "^1.31.0" [tool.poetry.group.extra.dependencies] @@ -108,10 +111,11 @@ ollama = ["ollama"] 
azure = [ "azure-core", "azure-identity", - "azure-ai-formrecognizer", - "azure-ai-textanalytics" + # "azure-ai-formrecognizer", + # "azure-ai-textanalytics", # will reevaluate the need for these two ] +bedrock = ["boto3"] datasets = ["datasets"] [[tool.poetry.source]] diff --git a/adalflow/tests/test_AzureClient.py b/adalflow/tests/test_AzureClient.py index 8bfc1ce7..14b7cf4a 100644 --- a/adalflow/tests/test_AzureClient.py +++ b/adalflow/tests/test_AzureClient.py @@ -1,17 +1,13 @@ import unittest -from unittest.mock import patch, MagicMock -from azure.identity import DefaultAzureCredential +from unittest.mock import patch from openai import AzureOpenAI -from openai.types import Completion, CreateEmbeddingResponse -from openai.types.chat import ChatCompletionChunk, ChatCompletion -from adalflow.core.model_client import ModelClient -from adalflow.core.types import ModelType, EmbedderOutput, TokenLogProb, CompletionUsage, GeneratorOutput -from adalflow.components.model_client.openai_client import AzureAIClient +from adalflow.components.model_client.azureai_client import AzureAIClient + class TestAzureAIClient(unittest.TestCase): - @patch('adalflow.components.model_client.openai_client.AzureOpenAI') - @patch('adalflow.components.model_client.openai_client.DefaultAzureCredential') + @patch("adalflow.components.model_client.azureai_client.AzureOpenAI") + @patch("adalflow.components.model_client.azureai_client.DefaultAzureCredential") def setUp(self, MockDefaultAzureCredential, MockAzureOpenAI): self.mock_credential = MockDefaultAzureCredential() self.mock_sync_client = MockAzureOpenAI.return_value @@ -24,96 +20,106 @@ def setUp(self, MockDefaultAzureCredential, MockAzureOpenAI): self.client.sync_client = self.mock_sync_client def test_init_sync_client_with_api_key(self): - client = AzureAIClient(api_key="test_key", api_version="v1", azure_endpoint="https://test.endpoint") + client = AzureAIClient( + api_key="test_key", api_version="v1", azure_endpoint="https://test.endpoint" + ) self.assertIsInstance(client.sync_client, AzureOpenAI) def test_init_sync_client_with_credential(self): client = AzureAIClient( api_version="v1", azure_endpoint="https://test.endpoint", - credential=self.mock_credential + credential=self.mock_credential, ) self.assertIsInstance(client.sync_client, AzureOpenAI) - @patch('adalflow.components.model_client.openai_client.AzureOpenAI') - def test_call_embeddings(self, MockAzureOpenAI): - mock_embeddings = MagicMock() - MockAzureOpenAI.return_value.embeddings.create = mock_embeddings - api_kwargs = {'input': ["test"]} - model_type = ModelType.EMBEDDER - self.client.call(api_kwargs=api_kwargs, model_type=model_type) - MockAzureOpenAI.return_value.embeddings.create.assert_called_once_with(**api_kwargs) + # @patch("adalflow.components.model_client.azureai_client.AzureOpenAI") + # def test_call_embeddings(self, MockAzureOpenAI): + # mock_embeddings = MagicMock() + # MockAzureOpenAI.return_value.embeddings.create = mock_embeddings + # api_kwargs = {"input": ["test"]} + # model_type = ModelType.EMBEDDER + # self.client.call(api_kwargs=api_kwargs, model_type=model_type) + # MockAzureOpenAI.return_value.embeddings.create.assert_called_once_with( + # **api_kwargs + # ) - @patch('adalflow.components.model_client.openai_client.AzureOpenAI') - def test_call_chat_completions(self, MockAzureOpenAI): - mock_chat_completions = MagicMock() - MockAzureOpenAI.return_value.chat.completions.create = mock_chat_completions - api_kwargs = {'input': "test"} - model_type = ModelType.LLM - 
self.client.call(api_kwargs=api_kwargs, model_type=model_type) - MockAzureOpenAI.return_value.chat.completions.create.assert_called_once_with(**api_kwargs) + # @patch("adalflow.components.model_client.azureai_client.AzureOpenAI") + # def test_call_chat_completions(self, MockAzureOpenAI): + # mock_chat_completions = MagicMock() + # MockAzureOpenAI.return_value.chat.completions.create = mock_chat_completions + # api_kwargs = {"input": "test"} + # model_type = ModelType.LLM + # self.client.call(api_kwargs=api_kwargs, model_type=model_type) + # MockAzureOpenAI.return_value.chat.completions.create.assert_called_once_with( + # **api_kwargs + # ) - @patch('adalflow.components.model_client.openai_client.AzureOpenAI') - def test_parse_chat_completion(self, MockAzureOpenAI): - mock_chat_completion = MagicMock(spec=ChatCompletion) - mock_chat_completion.choices = [MagicMock(message=MagicMock(content="test_content"))] - self.client.chat_completion_parser = lambda completion: completion.choices[0].message.content - result = self.client.parse_chat_completion(mock_chat_completion) - self.assertEqual(result, "test_content") + # @patch("adalflow.components.model_client.azureai_client.AzureOpenAI") + # def test_parse_chat_completion(self, MockAzureOpenAI): + # mock_chat_completion = MagicMock(spec=ChatCompletion) + # mock_chat_completion.choices = [ + # MagicMock(message=MagicMock(content="test_content")) + # ] + # self.client.chat_completion_parser = lambda completion: completion.choices[ + # 0 + # ].message.content + # result = self.client.parse_chat_completion(mock_chat_completion) + # self.assertEqual(result, "test_content") - @patch('adalflow.components.model_client.openai_client.AzureOpenAI') - def test_track_completion_usage(self, MockAzureOpenAI): - mock_chat_completion = MagicMock(spec=ChatCompletion) - mock_chat_completion.usage = MagicMock( - completion_tokens=10, - prompt_tokens=5, - total_tokens=15 - ) - result = self.client.track_completion_usage(mock_chat_completion) - self.assertEqual(result.completion_tokens, 10) - self.assertEqual(result.prompt_tokens, 5) - self.assertEqual(result.total_tokens, 15) + # @patch("adalflow.components.model_client.azureai_client.AzureOpenAI") + # def test_track_completion_usage(self, MockAzureOpenAI): + # mock_chat_completion = MagicMock(spec=ChatCompletion) + # mock_chat_completion.usage = MagicMock( + # completion_tokens=10, prompt_tokens=5, total_tokens=15 + # ) + # result = self.client.track_completion_usage(mock_chat_completion) + # self.assertEqual(result.completion_tokens, 10) + # self.assertEqual(result.prompt_tokens, 5) + # self.assertEqual(result.total_tokens, 15) + + # @patch("adalflow.components.model_client.azureai_client.AzureOpenAI") + # def test_parse_embedding_response(self, MockAzureOpenAI): + # mock_embedding_response = MagicMock(spec=CreateEmbeddingResponse) + # self.client.parse_embedding_response = lambda response: EmbedderOutput( + # data=["test_embedding"], error=None, raw_response=response + # ) + # result = self.client.parse_embedding_response(mock_embedding_response) + # self.assertEqual(result.data, ["test_embedding"]) - @patch('adalflow.components.model_client.openai_client.AzureOpenAI') - def test_parse_embedding_response(self, MockAzureOpenAI): - mock_embedding_response = MagicMock(spec=CreateEmbeddingResponse) - self.client.parse_embedding_response = lambda response: EmbedderOutput(data=["test_embedding"], error=None, raw_response=response) - result = self.client.parse_embedding_response(mock_embedding_response) - 
self.assertEqual(result.data, ["test_embedding"]) + # @patch("adalflow.components.model_client.azureai_client.AzureOpenAI") + # def test_convert_inputs_to_api_kwargs(self, MockAzureOpenAI): + # input_data = "test input" + # model_kwargs = {"param": "value"} + # result = self.client.convert_inputs_to_api_kwargs( + # input=input_data, model_kwargs=model_kwargs, model_type=ModelType.LLM + # ) + # expected = {"input": input_data, "param": "value"} + # self.assertEqual(result, expected) - @patch('adalflow.components.model_client.openai_client.AzureOpenAI') - def test_convert_inputs_to_api_kwargs(self, MockAzureOpenAI): - input_data = "test input" - model_kwargs = {"param": "value"} - result = self.client.convert_inputs_to_api_kwargs(input=input_data, model_kwargs=model_kwargs, model_type=ModelType.LLM) - expected = { - "input": input_data, - "param": "value" - } - self.assertEqual(result, expected) + # def test_from_dict(self): + # data = { + # "api_key": "test_key", + # "api_version": "v1", + # "azure_endpoint": "https://test.endpoint", + # "credential": self.mock_credential, + # } + # client = AzureAIClient.from_dict(data) + # self.assertEqual(client._api_key, "test_key") + # self.assertEqual(client._apiversion, "v1") + # self.assertEqual(client._azure_endpoint, "https://test.endpoint") - def test_from_dict(self): - data = { - "api_key": "test_key", - "api_version": "v1", - "azure_endpoint": "https://test.endpoint", - "credential": self.mock_credential, - } - client = AzureAIClient.from_dict(data) - self.assertEqual(client._api_key, "test_key") - self.assertEqual(client._apiversion, "v1") - self.assertEqual(client._azure_endpoint, "https://test.endpoint") + # def test_to_dict(self): + # expected = { + # "api_key": "test_api_key", + # "api_version": "v1", + # "azure_endpoint": "https://test.endpoint", + # "credential": self.mock_credential, + # } + # result = self.client.to_dict() + # for key, value in expected.items(): + # self.assertEqual(result.get(key), value) - def test_to_dict(self): - expected = { - "api_key": "test_api_key", - "api_version": "v1", - "azure_endpoint": "https://test.endpoint", - "credential": self.mock_credential, - } - result = self.client.to_dict() - for key, value in expected.items(): - self.assertEqual(result.get(key), value) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/adalflow/tests/test_lazy_import.py b/adalflow/tests/test_lazy_import.py index 1731308a..25a3d634 100644 --- a/adalflow/tests/test_lazy_import.py +++ b/adalflow/tests/test_lazy_import.py @@ -1,7 +1,8 @@ from types import ModuleType import pytest import unittest -from adalflow.utils.lazy_import import safe_import +from unittest.mock import MagicMock, patch +from adalflow.utils.lazy_import import safe_import, OptionalPackages class TestSafeImport(unittest.TestCase): @@ -27,6 +28,112 @@ def test_import_nonexistent_package(self): (f"Expected error message not found in {str(cm.exception)}"), ) + @patch("importlib.import_module") + def test_import_with_multiple_packages_success(self, mock_import): + """Test that safe_import successfully imports a module if all modules exist""" + # Set up the mock to return a MagicMock to simulate successful imports + mock_import.side_effect = [MagicMock(name="successful_import")] + + packages = ["math"] + imported_module = safe_import(packages, "Please install the required packages.") + + self.assertTrue( + hasattr(imported_module, "sqrt"), "math module did not load correctly." 
+ ) + + @patch("importlib.import_module") + def test_import_with_multiple_packages_failure(self, mock_import): + """Test that safe_import raises ImportError when any of the modules in the list fail to import""" + # Set up the mock to raise ImportError for the first package + mock_import.side_effect = ImportError + + packages = ["non_existent_module_1", "math"] + with self.assertRaises(ImportError) as context: + safe_import(packages, "Please install the required packages.") + + self.assertIn( + "Please install the required packages", + str(context.exception), + "Expected ImportError message does not match.", + ) + + ############################################################################################################ + # For AWS bedrock_client + ############################################################################################################ + @patch("importlib.import_module") + def test_successful_import_boto3(self, mock_import): + """Test that safe_import successfully imports boto3 if installed""" + # Simulate a successful import of boto3 by returning a MagicMock object + mock_import.side_effect = lambda name: ( + MagicMock(name="boto3") if name == "boto3" else ImportError + ) + + module_name = OptionalPackages.BOTO3.value[0] + imported_module = safe_import(module_name, OptionalPackages.BOTO3.value[1]) + + # Assert that the mock was called with 'boto3' and the imported module is a MagicMock instance + mock_import.assert_called_with("boto3") + self.assertIsInstance( + imported_module, MagicMock, "boto3 module did not load correctly." + ) + + @patch("importlib.import_module", side_effect=ImportError) + def test_failed_import_boto3(self, mock_import): + """Test that safe_import raises an ImportError when boto3 is not installed""" + module_name = OptionalPackages.BOTO3.value[0] + with self.assertRaises(ImportError) as context: + safe_import(module_name, OptionalPackages.BOTO3.value[1]) + + self.assertIn( + "Please install boto3 with: pip install boto3", + str(context.exception), + "Expected ImportError message for boto3 does not match.", + ) + + ############################################################################################################ + # For Azure ai model client + ############################################################################################################ + @patch("importlib.import_module") + def test_successful_import_azure(self, mock_import): + """Test that safe_import successfully imports all Azure packages""" + # Simulate successful imports for each Azure package by returning a MagicMock for each + mock_import.side_effect = lambda name: MagicMock(name=name) + + module_names = OptionalPackages.AZURE.value[0] + imported_modules = safe_import(module_names, OptionalPackages.AZURE.value[1]) + + # Ensure that all azure modules were attempted to be imported + for module_name in module_names: + mock_import.assert_any_call(module_name) + + # Verify that the imported_modules is a list and contains each module as a MagicMock + self.assertIsInstance(imported_modules, list, "Expected a list of modules.") + self.assertEqual( + len(imported_modules), + len(module_names), + "Not all Azure modules were imported.", + ) + for imported_module, module_name in zip(imported_modules, module_names): + self.assertIsInstance( + imported_module, MagicMock, f"{module_name} did not load correctly." 
+ ) + + @patch("importlib.import_module") + def test_failed_import_azure(self, mock_import): + """Test that safe_import raises ImportError when any Azure package is not installed""" + # Set up the mock to raise ImportError for all Azure packages to simulate a missing package + mock_import.side_effect = ImportError + + module_names = OptionalPackages.AZURE.value[0] + with self.assertRaises(ImportError) as context: + safe_import(module_names, OptionalPackages.AZURE.value[1]) + + self.assertIn( + "Please install Azure packages with: pip install azure-identity azure-core azure-ai-formrecognizer azure-ai-textanalytics", + str(context.exception), + "Expected ImportError message for Azure packages does not match.", + ) + if __name__ == "__main__": pytest.main() diff --git a/docs/poetry.lock b/docs/poetry.lock index dfa2d273..9c079273 100644 --- a/docs/poetry.lock +++ b/docs/poetry.lock @@ -20,7 +20,7 @@ tests = ["hypothesis", "pytest"] [[package]] name = "adalflow" -version = "0.2.0.beta.3" +version = "0.2.3" description = "The Library to Build and Auto-optimize Any LLM Task Pipeline" optional = false python-versions = ">=3.9, <4.0" @@ -30,6 +30,7 @@ develop = true [package.dependencies] backoff = "^2.2.1" botocore = "^1.34.149" +colorama = "^0.4.6" diskcache = "^5.6.3" jinja2 = "^3.1.3" jsonlines = "^4.0.0" @@ -42,7 +43,10 @@ tqdm = "^4.66.4" [package.extras] anthropic = ["anthropic (>=0.31.1,<0.32.0)"] +azure = ["azure-core (>=1.24.0,<2.0.0)", "azure-identity (>=1.12.0,<2.0.0)"] +bedrock = ["boto3 (>=1.35.19,<2.0.0)"] cohere = ["cohere (>=5.5.8,<6.0.0)"] +datasets = [] faiss-cpu = ["faiss-cpu (>=1.8.0,<2.0.0)"] google-generativeai = ["google-generativeai (>=0.7.2,<0.8.0)"] groq = ["groq (>=0.9.0,<0.10.0)"] @@ -277,6 +281,43 @@ tests = ["attrs[tests-no-zope]", "zope-interface"] tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +[[package]] +name = "azure-core" +version = "1.31.0" +description = "Microsoft Azure Core Library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_core-1.31.0-py3-none-any.whl", hash = "sha256:22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd"}, + {file = "azure_core-1.31.0.tar.gz", hash = "sha256:656a0dd61e1869b1506b7c6a3b31d62f15984b1a573d6326f6aa2f3e4123284b"}, +] + +[package.dependencies] +requests = ">=2.21.0" +six = ">=1.11.0" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["aiohttp (>=3.0)"] + +[[package]] +name = "azure-identity" +version = "1.18.0" +description = "Microsoft Azure Identity Library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_identity-1.18.0-py3-none-any.whl", hash = "sha256:bccf6106245b49ff41d0c4cd7b72851c5a2ba3a32cef7589da246f5727f26f02"}, + {file = "azure_identity-1.18.0.tar.gz", hash = "sha256:f567579a65d8932fa913c76eddf3305101a15e5727a5e4aa5df649a0f553d4c3"}, +] + +[package.dependencies] +azure-core = ">=1.31.0" +cryptography = ">=2.5" +msal = ">=1.30.0" +msal-extensions = ">=1.2.0" +typing-extensions = ">=4.0.0" + [[package]] name = "babel" version = "2.15.0" @@ -601,6 +642,55 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "cryptography" +version = "43.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "datasets" version = "2.20.0" @@ -1822,6 +1912,40 @@ docs = ["sphinx"] gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] +[[package]] +name = "msal" +version = "1.31.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." +optional = false +python-versions = ">=3.7" +files = [ + {file = "msal-1.31.0-py3-none-any.whl", hash = "sha256:96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7"}, + {file = "msal-1.31.0.tar.gz", hash = "sha256:2c4f189cf9cc8f00c80045f66d39b7c0f3ed45873fd3d1f2af9f22db2e12ff4b"}, +] + +[package.dependencies] +cryptography = ">=2.5,<46" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[package.extras] +broker = ["pymsalruntime (>=0.14,<0.18)", "pymsalruntime (>=0.17,<0.18)"] + +[[package]] +name = "msal-extensions" +version = "1.2.0" +description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, + {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, +] + +[package.dependencies] +msal = ">=1.29,<2" +portalocker = ">=1.4,<3" + [[package]] name = "multidict" version = "6.0.5" @@ -2801,6 +2925,26 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pyparsing" version = "3.1.2" @@ -4551,4 +4695,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" -content-hash = "9558d0212fb3ace6733d867de454171304261bbd582561b0e09950faf19e2e4f" +content-hash = "850edd56328727c4ff8ebf9b6642b37ff645c866d2406beb21cbaf6d64332ebf" diff --git a/docs/pyproject.toml b/docs/pyproject.toml index 30b5a5a4..3c69682b 100644 --- a/docs/pyproject.toml +++ b/docs/pyproject.toml @@ -10,6 +10,7 @@ packages = [{ from = "_dummy", include = "dummy" }] # empty packages python = ">=3.9, <4.0" adalflow = { path = "../adalflow", develop = true } # document always follow the latest version of the library + [tool.poetry.group.doc.dependencies] pydata-sphinx-theme = "^0.15.3" sphinx-design = "^0.6.0" @@ -20,6 +21,7 @@ nbconvert = "^7.16.4" pandoc = "^2.3" readthedocs-sphinx-search = "^0.3.2" # add optional packages to compile the documents correctly +datasets = "^2.20.0" sqlalchemy = "^2.0.31" google-generativeai = "^0.7.1" anthropic = "^0.30.1" @@ -32,9 +34,12 @@ faiss-cpu = "^1.8.0.post1" ollama = "^0.3.0" qdrant-client = "^1.10.1" diskcache = "^5.6.3" -datasets = "^2.20.0" torch = "^2.4.0" +# azure model_client +azure-identity = "^1.18.0" +azure-core = "^1.31.0" + [build-system] requires = ["poetry-core>=1.0.0"] From 20f390c8caefc0b5f6291464adf90955e48b1ffd Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sun, 6 Oct 2024 15:05:27 +0800 Subject: [PATCH 11/40] update the poetry.lock to fix the conflict --- adalflow/poetry.lock | 1258 ++++++++++++++++++++++-------------------- 1 file changed, 646 insertions(+), 612 deletions(-) diff --git a/adalflow/poetry.lock b/adalflow/poetry.lock index b8aa9864..997730ee 100644 --- a/adalflow/poetry.lock +++ b/adalflow/poetry.lock @@ -13,113 +13,113 @@ files = [ [[package]] name = "aiohappyeyeballs" -version = "2.4.0" +version = "2.4.3" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, - {file = 
"aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, ] [[package]] name = "aiohttp" -version = "3.10.5" +version = "3.10.9" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"}, - {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"}, - {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"}, - {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"}, - {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"}, - {file = 
"aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"}, - {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"}, - {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"}, - {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"}, - {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"}, - {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"}, - {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"}, - {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"}, - {file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"}, - {file = 
"aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"}, - {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"}, - {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"}, - {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"}, + {file = "aiohttp-3.10.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8b3fb28a9ac8f2558760d8e637dbf27aef1e8b7f1d221e8669a1074d1a266bb2"}, + {file = "aiohttp-3.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91aa966858593f64c8a65cdefa3d6dc8fe3c2768b159da84c1ddbbb2c01ab4ef"}, + {file = "aiohttp-3.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63649309da83277f06a15bbdc2a54fbe75efb92caa2c25bb57ca37762789c746"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e7fabedb3fe06933f47f1538df7b3a8d78e13d7167195f51ca47ee12690373"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c070430fda1a550a1c3a4c2d7281d3b8cfc0c6715f616e40e3332201a253067"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51d0a4901b27272ae54e42067bc4b9a90e619a690b4dc43ea5950eb3070afc32"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fec5fac7aea6c060f317f07494961236434928e6f4374e170ef50b3001e14581"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:172ad884bb61ad31ed7beed8be776eb17e7fb423f1c1be836d5cb357a096bf12"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d646fdd74c25bbdd4a055414f0fe32896c400f38ffbdfc78c68e62812a9e0257"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e86260b76786c28acf0b5fe31c8dca4c2add95098c709b11e8c35b424ebd4f5b"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d7cafc11d70fdd8801abfc2ff276744ae4cb39d8060b6b542c7e44e5f2cfc2"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc262c3df78c8ff6020c782d9ce02e4bcffe4900ad71c0ecdad59943cba54442"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:482c85cf3d429844396d939b22bc2a03849cb9ad33344689ad1c85697bcba33a"}, + {file = "aiohttp-3.10.9-cp310-cp310-win32.whl", hash = "sha256:aeebd3061f6f1747c011e1d0b0b5f04f9f54ad1a2ca183e687e7277bef2e0da2"}, + {file = "aiohttp-3.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:fa430b871220dc62572cef9c69b41e0d70fcb9d486a4a207a5de4c1f25d82593"}, + {file = "aiohttp-3.10.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:16e6a51d8bc96b77f04a6764b4ad03eeef43baa32014fce71e882bd71302c7e4"}, + {file = "aiohttp-3.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8bd9125dd0cc8ebd84bff2be64b10fdba7dc6fd7be431b5eaf67723557de3a31"}, + {file = "aiohttp-3.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dcf354661f54e6a49193d0b5653a1b011ba856e0b7a76bda2c33e4c6892f34ea"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42775de0ca04f90c10c5c46291535ec08e9bcc4756f1b48f02a0657febe89b10"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:87d1e4185c5d7187684d41ebb50c9aeaaaa06ca1875f4c57593071b0409d2444"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2695c61cf53a5d4345a43d689f37fc0f6d3a2dc520660aec27ec0f06288d1f9"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a3f063b41cc06e8d0b3fcbbfc9c05b7420f41287e0cd4f75ce0a1f3d80729e6"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d37f4718002863b82c6f391c8efd4d3a817da37030a29e2682a94d2716209de"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2746d8994ebca1bdc55a1e998feff4e94222da709623bb18f6e5cfec8ec01baf"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6f3c6648aa123bcd73d6f26607d59967b607b0da8ffcc27d418a4b59f4c98c7c"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:558b3d223fd631ad134d89adea876e7fdb4c93c849ef195049c063ada82b7d08"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4e6cb75f8ddd9c2132d00bc03c9716add57f4beff1263463724f6398b813e7eb"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:608cecd8d58d285bfd52dbca5b6251ca8d6ea567022c8a0eaae03c2589cd9af9"}, + {file = "aiohttp-3.10.9-cp311-cp311-win32.whl", hash = "sha256:36d4fba838be5f083f5490ddd281813b44d69685db910907636bc5dca6322316"}, + {file = "aiohttp-3.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:8be1a65487bdfc285bd5e9baf3208c2132ca92a9b4020e9f27df1b16fab998a9"}, + {file = "aiohttp-3.10.9-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4fd16b30567c5b8e167923be6e027eeae0f20cf2b8a26b98a25115f28ad48ee0"}, + {file = "aiohttp-3.10.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:40ff5b7660f903dc587ed36ef08a88d46840182d9d4b5694e7607877ced698a1"}, + {file = "aiohttp-3.10.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4edc3fd701e2b9a0d605a7b23d3de4ad23137d23fc0dbab726aa71d92f11aaaf"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e525b69ee8a92c146ae5b4da9ecd15e518df4d40003b01b454ad694a27f498b5"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5002a02c17fcfd796d20bac719981d2fca9c006aac0797eb8f430a58e9d12431"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4ceeae2fb8cabdd1b71c82bfdd39662473d3433ec95b962200e9e752fb70d0"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e395c3d1f773cf0651cd3559e25182eb0c03a2777b53b4575d8adc1149c6e9"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbdb8def5268f3f9cd753a265756f49228a20ed14a480d151df727808b4531dd"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f82ace0ec57c94aaf5b0e118d4366cff5889097412c75aa14b4fd5fc0c44ee3e"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6ebdc3b3714afe1b134b3bbeb5f745eed3ecbcff92ab25d80e4ef299e83a5465"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f9ca09414003c0e96a735daa1f071f7d7ed06962ef4fa29ceb6c80d06696d900"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1298b854fd31d0567cbb916091be9d3278168064fca88e70b8468875ef9ff7e7"}, + {file 
= "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:60ad5b8a7452c0f5645c73d4dad7490afd6119d453d302cd5b72b678a85d6044"}, + {file = "aiohttp-3.10.9-cp312-cp312-win32.whl", hash = "sha256:1a0ee6c0d590c917f1b9629371fce5f3d3f22c317aa96fbdcce3260754d7ea21"}, + {file = "aiohttp-3.10.9-cp312-cp312-win_amd64.whl", hash = "sha256:c46131c6112b534b178d4e002abe450a0a29840b61413ac25243f1291613806a"}, + {file = "aiohttp-3.10.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2bd9f3eac515c16c4360a6a00c38119333901b8590fe93c3257a9b536026594d"}, + {file = "aiohttp-3.10.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8cc0d13b4e3b1362d424ce3f4e8c79e1f7247a00d792823ffd640878abf28e56"}, + {file = "aiohttp-3.10.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ba1a599255ad6a41022e261e31bc2f6f9355a419575b391f9655c4d9e5df5ff5"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:776e9f3c9b377fcf097c4a04b241b15691e6662d850168642ff976780609303c"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8debb45545ad95b58cc16c3c1cc19ad82cffcb106db12b437885dbee265f0ab5"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2555e4949c8d8782f18ef20e9d39730d2656e218a6f1a21a4c4c0b56546a02e"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c54dc329cd44f7f7883a9f4baaefe686e8b9662e2c6c184ea15cceee587d8d69"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e709d6ac598c5416f879bb1bae3fd751366120ac3fa235a01de763537385d036"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:17c272cfe7b07a5bb0c6ad3f234e0c336fb53f3bf17840f66bd77b5815ab3d16"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0c21c82df33b264216abffff9f8370f303dab65d8eee3767efbbd2734363f677"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9331dd34145ff105177855017920dde140b447049cd62bb589de320fd6ddd582"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ac3196952c673822ebed8871cf8802e17254fff2a2ed4835d9c045d9b88c5ec7"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2c33fa6e10bb7ed262e3ff03cc69d52869514f16558db0626a7c5c61dde3c29f"}, + {file = "aiohttp-3.10.9-cp313-cp313-win32.whl", hash = "sha256:a14e4b672c257a6b94fe934ee62666bacbc8e45b7876f9dd9502d0f0fe69db16"}, + {file = "aiohttp-3.10.9-cp313-cp313-win_amd64.whl", hash = "sha256:a35ed3d03910785f7d9d6f5381f0c24002b2b888b298e6f941b2fc94c5055fcd"}, + {file = "aiohttp-3.10.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f392ef50e22c31fa49b5a46af7f983fa3f118f3eccb8522063bee8bfa6755f8"}, + {file = "aiohttp-3.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d1f5c9169e26db6a61276008582d945405b8316aae2bb198220466e68114a0f5"}, + {file = "aiohttp-3.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8d9d10d10ec27c0d46ddaecc3c5598c4db9ce4e6398ca872cdde0525765caa2f"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d97273a52d7f89a75b11ec386f786d3da7723d7efae3034b4dda79f6f093edc1"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d271f770b52e32236d945911b2082f9318e90ff835d45224fa9e28374303f729"}, + {file = 
"aiohttp-3.10.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7003f33f5f7da1eb02f0446b0f8d2ccf57d253ca6c2e7a5732d25889da82b517"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6e00c8a92e7663ed2be6fcc08a2997ff06ce73c8080cd0df10cc0321a3168d7"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a61df62966ce6507aafab24e124e0c3a1cfbe23c59732987fc0fd0d71daa0b88"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:60555211a006d26e1a389222e3fab8cd379f28e0fbf7472ee55b16c6c529e3a6"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d15a29424e96fad56dc2f3abed10a89c50c099f97d2416520c7a543e8fddf066"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:a19caae0d670771ea7854ca30df76f676eb47e0fd9b2ee4392d44708f272122d"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:99f9678bf0e2b1b695e8028fedac24ab6770937932eda695815d5a6618c37e04"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2914caa46054f3b5ff910468d686742ff8cff54b8a67319d75f5d5945fd0a13d"}, + {file = "aiohttp-3.10.9-cp38-cp38-win32.whl", hash = "sha256:0bc059ecbce835630e635879f5f480a742e130d9821fbe3d2f76610a6698ee25"}, + {file = "aiohttp-3.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:e883b61b75ca6efc2541fcd52a5c8ccfe288b24d97e20ac08fdf343b8ac672ea"}, + {file = "aiohttp-3.10.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fcd546782d03181b0b1d20b43d612429a90a68779659ba8045114b867971ab71"}, + {file = "aiohttp-3.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:85711eec2d875cd88c7eb40e734c4ca6d9ae477d6f26bd2b5bb4f7f60e41b156"}, + {file = "aiohttp-3.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02d1d6610588bcd743fae827bd6f2e47e0d09b346f230824b4c6fb85c6065f9c"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3668d0c2a4d23fb136a753eba42caa2c0abbd3d9c5c87ee150a716a16c6deec1"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7c071235a47d407b0e93aa6262b49422dbe48d7d8566e1158fecc91043dd948"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac74e794e3aee92ae8f571bfeaa103a141e409863a100ab63a253b1c53b707eb"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bbf94d4a0447705b7775417ca8bb8086cc5482023a6e17cdc8f96d0b1b5aba6"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb0b2d5d51f96b6cc19e6ab46a7b684be23240426ae951dcdac9639ab111b45e"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e83dfefb4f7d285c2d6a07a22268344a97d61579b3e0dce482a5be0251d672ab"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f0a44bb40b6aaa4fb9a5c1ee07880570ecda2065433a96ccff409c9c20c1624a"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c2b627d3c8982691b06d89d31093cee158c30629fdfebe705a91814d49b554f8"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:03690541e4cc866eef79626cfa1ef4dd729c5c1408600c8cb9e12e1137eed6ab"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad3675c126f2a95bde637d162f8231cff6bc0bc9fbe31bd78075f9ff7921e322"}, 
+ {file = "aiohttp-3.10.9-cp39-cp39-win32.whl", hash = "sha256:1321658f12b6caffafdc35cfba6c882cb014af86bef4e78c125e7e794dfb927b"}, + {file = "aiohttp-3.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:9fdf5c839bf95fc67be5794c780419edb0dbef776edcfc6c2e5e2ffd5ee755fa"}, + {file = "aiohttp-3.10.9.tar.gz", hash = "sha256:143b0026a9dab07a05ad2dd9e46aa859bffdd6348ddc5967b42161168c24f857"}, ] [package.dependencies] @@ -129,7 +129,7 @@ async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" +yarl = ">=1.12.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] @@ -186,13 +186,13 @@ vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" -version = "4.4.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] @@ -202,9 +202,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "async-timeout" @@ -286,18 +286,17 @@ files = [ [[package]] name = "boto3" -version = "1.35.19" +version = "1.35.34" description = "The AWS SDK for Python" optional = true -optional = true python-versions = ">=3.8" files = [ - {file = "boto3-1.35.19-py3-none-any.whl", hash = "sha256:84b3fe1727945bc3cada832d969ddb3dc0d08fce1677064ca8bdc13a89c1a143"}, - {file = "boto3-1.35.19.tar.gz", hash = "sha256:9979fe674780a0b7100eae9156d74ee374cd1638a9f61c77277e3ce712f3e496"}, + {file = "boto3-1.35.34-py3-none-any.whl", hash = "sha256:291e7b97a34967ed93297e6171f1bebb8529e64633dd48426760e3fdef1cdea8"}, + {file = "boto3-1.35.34.tar.gz", hash = "sha256:57e6ee8504e7929bc094bb2afc879943906064179a1e88c23b4812e2c6f61532"}, ] [package.dependencies] -botocore = ">=1.35.19,<1.36.0" +botocore = ">=1.35.34,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -306,13 +305,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.19" +version = "1.35.34" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.19-py3-none-any.whl", hash = "sha256:c83f7f0cacfe7c19b109b363ebfa8736e570d24922f16ed371681f58ebab44a9"}, - {file = "botocore-1.35.19.tar.gz", hash = "sha256:42d6d8db7250cbd7899f786f9861e02cab17dc238f64d6acb976098ed9809625"}, + {file = "botocore-1.35.34-py3-none-any.whl", hash = "sha256:ccb0fe397b11b81c9abc0c87029d17298e17bf658d8db5c0c5a551a12a207e7a"}, + {file = "botocore-1.35.34.tar.gz", hash = "sha256:789b6501a3bb4a9591c1fe10da200cc315c1fa5df5ada19c720d8ef06439b3e3"}, ] [package.dependencies] @@ -324,7 +323,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.21.5)"] +crt = ["awscrt (==0.22.0)"] [[package]] name = "cachetools" @@ -539,13 +538,13 @@ files = [ [[package]] name = "cohere" -version = "5.9.2" +version = "5.10.0" description = "" optional = true python-versions = "<4.0,>=3.8" files = [ - {file = "cohere-5.9.2-py3-none-any.whl", hash = "sha256:169ee06b0a54f8a913d42b19123bd72c1a72833275d544a52606d307f5547a7b"}, - {file = "cohere-5.9.2.tar.gz", hash = "sha256:1860c527b2a8a5593873a342b0bf572220b6db7966c0782076b3f2740ab3d94d"}, + {file = "cohere-5.10.0-py3-none-any.whl", hash = "sha256:46e50e3e8514a99cf77b4c022c8077a6205fba948051c33087ddeb66ec706f0a"}, + {file = "cohere-5.10.0.tar.gz", hash = "sha256:21020a7ae4c30f72991ef91566a926a9d7d1485d7abeed7bfa2bd6f35ea34783"}, ] [package.dependencies] @@ -913,69 +912,75 @@ zstandard = ["zstandard"] [[package]] name = "filelock" -version = "3.16.0" +version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "fonttools" -version = "4.53.1" +version = "4.54.1" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, - {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, - {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, - {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, - {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, - {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, - {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, - {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, - {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, - {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, - {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, - {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, - {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, - {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, - {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, - {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, - {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, - {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, - {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, - {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, + {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"}, + {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"}, + {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"}, + {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"}, + {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"}, + {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"}, + {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"}, + {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"}, + {file = 
"fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"}, + {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"}, + {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"}, + {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"}, + {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"}, + {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"}, + {file = "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"}, + {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"}, + {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"}, + {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"}, + {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"}, + {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"}, + {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"}, + {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"}, + {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"}, + {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"}, + {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"}, + {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"}, + {file = "fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"}, + {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"}, + {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"}, + {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"}, + {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"}, 
+ {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"}, + {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"}, + {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"}, + {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"}, + {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"}, + {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"}, + {file = "fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"}, + {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"}, + {file = "fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"}, + {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"}, + {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"}, + {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"}, + {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"}, + {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"}, + {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"}, + {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"}, + {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"}, ] [package.extras] @@ -1139,13 +1144,13 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-api-core" -version = "2.19.2" +version = "2.20.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_core-2.19.2-py3-none-any.whl", hash = "sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4"}, - {file = "google_api_core-2.19.2.tar.gz", hash = "sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f"}, + {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, + {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, ] [package.dependencies] @@ -1170,13 +1175,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.145.0" +version = "2.147.0" description = "Google API Client 
Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.145.0-py2.py3-none-any.whl", hash = "sha256:d74da1358f3f2d63daf3c6f26bd96d89652051183bc87cf10a56ceb2a70beb50"}, - {file = "google_api_python_client-2.145.0.tar.gz", hash = "sha256:8b84dde11aaccadc127e4846f5cd932331d804ea324e353131595e3f25376e97"}, + {file = "google_api_python_client-2.147.0-py2.py3-none-any.whl", hash = "sha256:c6ecfa193c695baa41e84562d8f8f244fcd164419eca3fc9fd7565646668f9b2"}, + {file = "google_api_python_client-2.147.0.tar.gz", hash = "sha256:e864c2cf61d34c00f05278b8bdb72b93b6fa34f0de9ead51d20435f3b65f91be"}, ] [package.dependencies] @@ -1188,13 +1193,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.34.0" +version = "2.35.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, - {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, ] [package.dependencies] @@ -1282,77 +1287,84 @@ test = ["coverage", "pytest (>=7,<8.1)", "pytest-cov", "pytest-mock (>=3)"] [[package]] name = "greenlet" -version = "3.1.0" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = true python-versions = ">=3.7" files = [ - {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, - {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, - {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, - {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, - {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, - {file = 
"greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, - {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, - {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, - {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, - {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, - {file = 
"greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, - {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, - {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, - {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, - {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, - {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, - {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, - {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, - {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, - {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, - {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, - {file = 
"greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, - {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, - {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, - {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -1380,61 +1392,70 @@ typing-extensions = ">=4.7,<5" [[package]] name = "grpcio" -version = "1.66.1" +version = "1.66.2" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"}, - {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"}, - {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"}, - {file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"}, - {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"}, - {file = "grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"}, - {file = "grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"}, - {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"}, - {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"}, - {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"}, - {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"}, - {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"}, - {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"}, - {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = 
"sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"}, - {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"}, - {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"}, - {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"}, - {file = "grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"}, - {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"}, - {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"}, - {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"}, + {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, + {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, + {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = 
"sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, + {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, + {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, + {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, + {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, + {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, + {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, + {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, + {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, + {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, + {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, + {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, + {file = 
"grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, + {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, + {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, + {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, + {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, + {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, + {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, + {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, + {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, + {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, + {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, + {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.66.1)"] +protobuf = ["grpcio-tools 
(>=1.66.2)"] [[package]] name = "grpcio-status" @@ -1465,13 +1486,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -1482,7 +1503,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httplib2" @@ -1536,13 +1557,13 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.24.7" +version = "0.25.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.24.7-py3-none-any.whl", hash = "sha256:a212c555324c8a7b1ffdd07266bb7e7d69ca71aa238d27b7842d65e9a26ac3e5"}, - {file = "huggingface_hub-0.24.7.tar.gz", hash = "sha256:0ad8fb756e2831da0ac0491175b960f341fe06ebcf80ed6f8728313f95fc0207"}, + {file = "huggingface_hub-0.25.1-py3-none-any.whl", hash = "sha256:a5158ded931b3188f54ea9028097312cb0acd50bffaaa2612014c3c526b44972"}, + {file = "huggingface_hub-0.25.1.tar.gz", hash = "sha256:9ff7cb327343211fbd06e2b149b8f362fd1e389454f3f14c6db75a4999ee20ff"}, ] [package.dependencies] @@ -1584,13 +1605,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.9" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.9-py3-none-any.whl", hash = "sha256:69297d5da0cc9281c77efffb4e730254dd45943f45bbfb461de5991713989b1e"}, - {file = "idna-3.9.tar.gz", hash = "sha256:e5c5dafde284f26e9e0f28f6ea2d6400abd5ca099864a67f576f3981c6476124"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] @@ -2483,14 +2504,14 @@ files = [ [[package]] name = "nvidia-nvjitlink-cu12" -version = "12.6.68" +version = "12.6.77" description = "Nvidia JIT LTO Library" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_aarch64.whl", hash = "sha256:b3fd0779845f68b92063ab1393abab1ed0a23412fc520df79a8190d098b5cd6b"}, - {file = "nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_x86_64.whl", hash = "sha256:125a6c2a44e96386dda634e13d944e60b07a0402d391a070e8fb4104b34ea1ab"}, - {file = "nvidia_nvjitlink_cu12-12.6.68-py3-none-win_amd64.whl", hash = "sha256:a55744c98d70317c5e23db14866a8cc2b733f7324509e941fc96276f9f37801d"}, + {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3bf10d85bb1801e9c894c6e197e44dd137d2a0a9e43f8450e9ad13f2df0dd52d"}, + {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9ae346d16203ae4ea513be416495167a0101d33d2d14935aa9c1829a3fb45142"}, + {file = 
"nvidia_nvjitlink_cu12-12.6.77-py3-none-win_amd64.whl", hash = "sha256:410718cd44962bed862a31dd0318620f6f9a8b28a6291967bcfcb446a6516771"}, ] [[package]] @@ -2520,13 +2541,13 @@ httpx = ">=0.27.0,<0.28.0" [[package]] name = "openai" -version = "1.45.0" +version = "1.51.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.45.0-py3-none-any.whl", hash = "sha256:2f1f7b7cf90f038a9f1c24f0d26c0f1790c102ec5acd07ffd70a9b7feac1ff4e"}, - {file = "openai-1.45.0.tar.gz", hash = "sha256:731207d10637335413aa3c0955f8f8df30d7636a4a0f9c381f2209d32cf8de97"}, + {file = "openai-1.51.0-py3-none-any.whl", hash = "sha256:d9affafb7e51e5a27dce78589d4964ce4d6f6d560307265933a94b2e3f3c5d2c"}, + {file = "openai-1.51.0.tar.gz", hash = "sha256:8dc4f9d75ccdd5466fc8c99a952186eddceb9fd6ba694044773f3736a847149d"}, ] [package.dependencies] @@ -2555,40 +2576,53 @@ files = [ [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = 
"pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = 
"pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] @@ -2642,12 +2676,13 @@ dev = ["jinja2"] [[package]] name = "pgvector" -version = "0.3.3" +version = "0.3.5" description = "pgvector support for Python" optional = true python-versions = ">=3.8" files = [ - {file = "pgvector-0.3.3-py2.py3-none-any.whl", hash = "sha256:2c14c9a5219ccf3757cda493dc756506992afad6233dafcecb6d8ab08155f177"}, + {file = "pgvector-0.3.5-py3-none-any.whl", hash = "sha256:56cca90392e596ea18873c593ec858a1984a77d16d1f82b8d0c180e79ef1018f"}, + {file = "pgvector-0.3.5.tar.gz", hash = "sha256:e876c9ee382c4c2f7ee57691a4c4015d688c7222e47448ce310ded03ecfafe2f"}, ] [package.dependencies] @@ -2752,13 +2787,13 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.3.3" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"}, - {file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] @@ -2837,22 +2872,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.4" +version = "4.25.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = 
"protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] [[package]] @@ -2944,18 +2979,18 @@ files = [ [[package]] name = "pydantic" -version = "2.9.1" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, - {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.3" +pydantic-core = "2.23.4" typing-extensions = [ {version = ">=4.6.1", markers = "python_version < \"3.13\""}, {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, @@ -2967,100 +3002,100 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.3" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, - {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, - {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, - {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, - {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, - {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, - {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, - {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, - {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, - {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, - {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, - {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, - {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, - {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, - {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, - {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, - {file = 
"pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, - {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, - {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, - {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, - {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, - {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, - {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, - {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, - {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, - {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, - 
{file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, - {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, - {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, - {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, - {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, - {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, - {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, - {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, - {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, - {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, - {file = 
"pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, - {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = 
"pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -3424,18 +3459,18 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "setuptools" -version = "74.1.2" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, - {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker 
(>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] @@ -3466,60 +3501,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.34" +version = "2.0.35" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, - {file = 
"SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash 
= "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"}, - {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"}, - {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = 
"SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = 
"SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, ] [package.dependencies] @@ -3553,13 +3588,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sympy" -version = "1.13.2" +version = "1.13.3" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" files = [ - {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, - {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, + {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, + {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, ] [package.dependencies] @@ -3570,12 +3605,12 @@ dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] [[package]] name = "tensorboard" -version = "2.17.1" +version = "2.18.0" description = "TensorBoard lets you watch Tensors Flow" optional = false python-versions = ">=3.9" files = [ - {file = "tensorboard-2.17.1-py3-none-any.whl", hash = "sha256:253701a224000eeca01eee6f7e978aea7b408f60b91eb0babdb04e78947b773e"}, + {file = "tensorboard-2.18.0-py3-none-any.whl", hash = "sha256:107ca4821745f73e2aefa02c50ff70a9b694f39f790b11e6f682f7d326745eab"}, ] [package.dependencies] @@ -3789,13 +3824,13 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] @@ -3901,13 +3936,13 @@ tutorials = ["matplotlib", "pandas", "tabulate"] [[package]] name = "types-pyyaml" -version = "6.0.12.20240808" +version = "6.0.12.20240917" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" files = [ - {file = 
"types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, - {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, + {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, + {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, ] [[package]] @@ -3973,13 +4008,13 @@ files = [ [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] @@ -4028,13 +4063,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.4" +version = "20.26.6" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, - {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, ] [package.dependencies] @@ -4197,103 +4232,103 @@ files = [ [[package]] name = "yarl" -version = "1.11.1" +version = "1.13.1" description = "Yet another URL library" optional = false python-versions = ">=3.8" files = [ - {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00"}, - {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d"}, - {file = "yarl-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2164cd9725092761fed26f299e3f276bb4b537ca58e6ff6b252eae9631b5c96e"}, - {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08ea567c16f140af8ddc7cb58e27e9138a1386e3e6e53982abaa6f2377b38cc"}, - {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:768ecc550096b028754ea28bf90fde071c379c62c43afa574edc6f33ee5daaec"}, - {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2909fa3a7d249ef64eeb2faa04b7957e34fefb6ec9966506312349ed8a7e77bf"}, - {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01a8697ec24f17c349c4f655763c4db70eebc56a5f82995e5e26e837c6eb0e49"}, - {file = "yarl-1.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e286580b6511aac7c3268a78cdb861ec739d3e5a2a53b4809faef6b49778eaff"}, - {file = 
"yarl-1.11.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4179522dc0305c3fc9782549175c8e8849252fefeb077c92a73889ccbcd508ad"}, - {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:27fcb271a41b746bd0e2a92182df507e1c204759f460ff784ca614e12dd85145"}, - {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f61db3b7e870914dbd9434b560075e0366771eecbe6d2b5561f5bc7485f39efd"}, - {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:c92261eb2ad367629dc437536463dc934030c9e7caca861cc51990fe6c565f26"}, - {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d95b52fbef190ca87d8c42f49e314eace4fc52070f3dfa5f87a6594b0c1c6e46"}, - {file = "yarl-1.11.1-cp310-cp310-win32.whl", hash = "sha256:489fa8bde4f1244ad6c5f6d11bb33e09cf0d1d0367edb197619c3e3fc06f3d91"}, - {file = "yarl-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:476e20c433b356e16e9a141449f25161e6b69984fb4cdbd7cd4bd54c17844998"}, - {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:946eedc12895873891aaceb39bceb484b4977f70373e0122da483f6c38faaa68"}, - {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21a7c12321436b066c11ec19c7e3cb9aec18884fe0d5b25d03d756a9e654edfe"}, - {file = "yarl-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c35f493b867912f6fda721a59cc7c4766d382040bdf1ddaeeaa7fa4d072f4675"}, - {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25861303e0be76b60fddc1250ec5986c42f0a5c0c50ff57cc30b1be199c00e63"}, - {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4b53f73077e839b3f89c992223f15b1d2ab314bdbdf502afdc7bb18e95eae27"}, - {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:327c724b01b8641a1bf1ab3b232fb638706e50f76c0b5bf16051ab65c868fac5"}, - {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4307d9a3417eea87715c9736d050c83e8c1904e9b7aada6ce61b46361b733d92"}, - {file = "yarl-1.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a28bed68ab8fb7e380775f0029a079f08a17799cb3387a65d14ace16c12e2b"}, - {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:067b961853c8e62725ff2893226fef3d0da060656a9827f3f520fb1d19b2b68a"}, - {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8215f6f21394d1f46e222abeb06316e77ef328d628f593502d8fc2a9117bde83"}, - {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:498442e3af2a860a663baa14fbf23fb04b0dd758039c0e7c8f91cb9279799bff"}, - {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:69721b8effdb588cb055cc22f7c5105ca6fdaa5aeb3ea09021d517882c4a904c"}, - {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e969fa4c1e0b1a391f3fcbcb9ec31e84440253325b534519be0d28f4b6b533e"}, - {file = "yarl-1.11.1-cp311-cp311-win32.whl", hash = "sha256:7d51324a04fc4b0e097ff8a153e9276c2593106a811704025bbc1d6916f45ca6"}, - {file = "yarl-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:15061ce6584ece023457fb8b7a7a69ec40bf7114d781a8c4f5dcd68e28b5c53b"}, - {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a4264515f9117be204935cd230fb2a052dd3792789cc94c101c535d349b3dab0"}, - {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:f41fa79114a1d2eddb5eea7b912d6160508f57440bd302ce96eaa384914cd265"}, - {file = "yarl-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02da8759b47d964f9173c8675710720b468aa1c1693be0c9c64abb9d8d9a4867"}, - {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9361628f28f48dcf8b2f528420d4d68102f593f9c2e592bfc842f5fb337e44fd"}, - {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b91044952da03b6f95fdba398d7993dd983b64d3c31c358a4c89e3c19b6f7aef"}, - {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74db2ef03b442276d25951749a803ddb6e270d02dda1d1c556f6ae595a0d76a8"}, - {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e975a2211952a8a083d1b9d9ba26472981ae338e720b419eb50535de3c02870"}, - {file = "yarl-1.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aef97ba1dd2138112890ef848e17d8526fe80b21f743b4ee65947ea184f07a2"}, - {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7915ea49b0c113641dc4d9338efa9bd66b6a9a485ffe75b9907e8573ca94b84"}, - {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:504cf0d4c5e4579a51261d6091267f9fd997ef58558c4ffa7a3e1460bd2336fa"}, - {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3de5292f9f0ee285e6bd168b2a77b2a00d74cbcfa420ed078456d3023d2f6dff"}, - {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a34e1e30f1774fa35d37202bbeae62423e9a79d78d0874e5556a593479fdf239"}, - {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66b63c504d2ca43bf7221a1f72fbe981ff56ecb39004c70a94485d13e37ebf45"}, - {file = "yarl-1.11.1-cp312-cp312-win32.whl", hash = "sha256:a28b70c9e2213de425d9cba5ab2e7f7a1c8ca23a99c4b5159bf77b9c31251447"}, - {file = "yarl-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:17b5a386d0d36fb828e2fb3ef08c8829c1ebf977eef88e5367d1c8c94b454639"}, - {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1fa2e7a406fbd45b61b4433e3aa254a2c3e14c4b3186f6e952d08a730807fa0c"}, - {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:750f656832d7d3cb0c76be137ee79405cc17e792f31e0a01eee390e383b2936e"}, - {file = "yarl-1.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b8486f322d8f6a38539136a22c55f94d269addb24db5cb6f61adc61eabc9d93"}, - {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fce4da3703ee6048ad4138fe74619c50874afe98b1ad87b2698ef95bf92c96d"}, - {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed653638ef669e0efc6fe2acb792275cb419bf9cb5c5049399f3556995f23c7"}, - {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18ac56c9dd70941ecad42b5a906820824ca72ff84ad6fa18db33c2537ae2e089"}, - {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688654f8507464745ab563b041d1fb7dab5d9912ca6b06e61d1c4708366832f5"}, - {file = "yarl-1.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4973eac1e2ff63cf187073cd4e1f1148dcd119314ab79b88e1b3fad74a18c9d5"}, - {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:964a428132227edff96d6f3cf261573cb0f1a60c9a764ce28cda9525f18f7786"}, - {file = 
"yarl-1.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6d23754b9939cbab02c63434776df1170e43b09c6a517585c7ce2b3d449b7318"}, - {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c2dc4250fe94d8cd864d66018f8344d4af50e3758e9d725e94fecfa27588ff82"}, - {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09696438cb43ea6f9492ef237761b043f9179f455f405279e609f2bc9100212a"}, - {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:999bfee0a5b7385a0af5ffb606393509cfde70ecca4f01c36985be6d33e336da"}, - {file = "yarl-1.11.1-cp313-cp313-win32.whl", hash = "sha256:ce928c9c6409c79e10f39604a7e214b3cb69552952fbda8d836c052832e6a979"}, - {file = "yarl-1.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:501c503eed2bb306638ccb60c174f856cc3246c861829ff40eaa80e2f0330367"}, - {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dae7bd0daeb33aa3e79e72877d3d51052e8b19c9025ecf0374f542ea8ec120e4"}, - {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3ff6b1617aa39279fe18a76c8d165469c48b159931d9b48239065767ee455b2b"}, - {file = "yarl-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3257978c870728a52dcce8c2902bf01f6c53b65094b457bf87b2644ee6238ddc"}, - {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f351fa31234699d6084ff98283cb1e852270fe9e250a3b3bf7804eb493bd937"}, - {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aef1b64da41d18026632d99a06b3fefe1d08e85dd81d849fa7c96301ed22f1b"}, - {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7175a87ab8f7fbde37160a15e58e138ba3b2b0e05492d7351314a250d61b1591"}, - {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba444bdd4caa2a94456ef67a2f383710928820dd0117aae6650a4d17029fa25e"}, - {file = "yarl-1.11.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ea9682124fc062e3d931c6911934a678cb28453f957ddccf51f568c2f2b5e05"}, - {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8418c053aeb236b20b0ab8fa6bacfc2feaaf7d4683dd96528610989c99723d5f"}, - {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:61a5f2c14d0a1adfdd82258f756b23a550c13ba4c86c84106be4c111a3a4e413"}, - {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f3a6d90cab0bdf07df8f176eae3a07127daafcf7457b997b2bf46776da2c7eb7"}, - {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:077da604852be488c9a05a524068cdae1e972b7dc02438161c32420fb4ec5e14"}, - {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:15439f3c5c72686b6c3ff235279630d08936ace67d0fe5c8d5bbc3ef06f5a420"}, - {file = "yarl-1.11.1-cp38-cp38-win32.whl", hash = "sha256:238a21849dd7554cb4d25a14ffbfa0ef380bb7ba201f45b144a14454a72ffa5a"}, - {file = "yarl-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:67459cf8cf31da0e2cbdb4b040507e535d25cfbb1604ca76396a3a66b8ba37a6"}, - {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:884eab2ce97cbaf89f264372eae58388862c33c4f551c15680dd80f53c89a269"}, - {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a336eaa7ee7e87cdece3cedb395c9657d227bfceb6781295cf56abcd3386a26"}, - {file = "yarl-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87f020d010ba80a247c4abc335fc13421037800ca20b42af5ae40e5fd75e7909"}, - {file = 
"yarl-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:637c7ddb585a62d4469f843dac221f23eec3cbad31693b23abbc2c366ad41ff4"}, - {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48dfd117ab93f0129084577a07287376cc69c08138694396f305636e229caa1a"}, - {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e0ae31fb5ccab6eda09ba1494e87eb226dcbd2372dae96b87800e1dcc98804"}, - {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f81501160c28d0c0b7333b4f7be8983dbbc161983b6fb814024d1b4952f79"}, - {file = "yarl-1.11.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04293941646647b3bfb1719d1d11ff1028e9c30199509a844da3c0f5919dc520"}, - {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:250e888fa62d73e721f3041e3a9abf427788a1934b426b45e1b92f62c1f68366"}, - {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e8f63904df26d1a66aabc141bfd258bf738b9bc7bc6bdef22713b4f5ef789a4c"}, - {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:aac44097d838dda26526cffb63bdd8737a2dbdf5f2c68efb72ad83aec6673c7e"}, - {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:267b24f891e74eccbdff42241c5fb4f974de2d6271dcc7d7e0c9ae1079a560d9"}, - {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6907daa4b9d7a688063ed098c472f96e8181733c525e03e866fb5db480a424df"}, - {file = "yarl-1.11.1-cp39-cp39-win32.whl", hash = "sha256:14438dfc5015661f75f85bc5adad0743678eefee266ff0c9a8e32969d5d69f74"}, - {file = "yarl-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:94d0caaa912bfcdc702a4204cd5e2bb01eb917fc4f5ea2315aa23962549561b0"}, - {file = "yarl-1.11.1-py3-none-any.whl", hash = "sha256:72bf26f66456baa0584eff63e44545c9f0eaed9b73cb6601b647c91f14c11f38"}, - {file = "yarl-1.11.1.tar.gz", hash = "sha256:1bb2d9e212fb7449b8fb73bc461b51eaa17cc8430b4a87d87be7b25052d92f53"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec9dd328016d8d25702a24ee274932aebf6be9787ed1c28d021945d264235b3c"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5820bd4178e6a639b3ef1db8b18500a82ceab6d8b89309e121a6859f56585b05"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86c438ce920e089c8c2388c7dcc8ab30dfe13c09b8af3d306bcabb46a053d6f7"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3de86547c820e4f4da4606d1c8ab5765dd633189791f15247706a2eeabc783ae"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca53632007c69ddcdefe1e8cbc3920dd88825e618153795b57e6ebcc92e752a"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4ee1d240b84e2f213565f0ec08caef27a0e657d4c42859809155cf3a29d1735"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c49f3e379177f4477f929097f7ed4b0622a586b0aa40c07ac8c0f8e40659a1ac"}, + {file = 
"yarl-1.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5c5e32fef09ce101fe14acd0f498232b5710effe13abac14cd95de9c274e689e"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab9524e45ee809a083338a749af3b53cc7efec458c3ad084361c1dbf7aaf82a2"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b1481c048fe787f65e34cb06f7d6824376d5d99f1231eae4778bbe5c3831076d"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:31497aefd68036d8e31bfbacef915826ca2e741dbb97a8d6c7eac66deda3b606"}, + {file = "yarl-1.13.1-cp310-cp310-win32.whl", hash = "sha256:1fa56f34b2236f5192cb5fceba7bbb09620e5337e0b6dfe2ea0ddbd19dd5b154"}, + {file = "yarl-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:1bbb418f46c7f7355084833051701b2301092e4611d9e392360c3ba2e3e69f88"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:216a6785f296169ed52cd7dcdc2612f82c20f8c9634bf7446327f50398732a51"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40c6e73c03a6befb85b72da213638b8aaa80fe4136ec8691560cf98b11b8ae6e"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2430cf996113abe5aee387d39ee19529327205cda975d2b82c0e7e96e5fdabdc"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb4134cc6e005b99fa29dbc86f1ea0a298440ab6b07c6b3ee09232a3b48f495"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309c104ecf67626c033845b860d31594a41343766a46fa58c3309c538a1e22b2"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f90575e9fe3aae2c1e686393a9689c724cd00045275407f71771ae5d690ccf38"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d2e1626be8712333a9f71270366f4a132f476ffbe83b689dd6dc0d114796c74"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b66c87da3c6da8f8e8b648878903ca54589038a0b1e08dde2c86d9cd92d4ac9"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf1ad338620249f8dd6d4b6a91a69d1f265387df3697ad5dc996305cf6c26fb2"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9915300fe5a0aa663c01363db37e4ae8e7c15996ebe2c6cce995e7033ff6457f"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:703b0f584fcf157ef87816a3c0ff868e8c9f3c370009a8b23b56255885528f10"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1d8e3ca29f643dd121f264a7c89f329f0fcb2e4461833f02de6e39fef80f89da"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7055bbade838d68af73aea13f8c86588e4bcc00c2235b4b6d6edb0dbd174e246"}, + {file = "yarl-1.13.1-cp311-cp311-win32.whl", hash = "sha256:a3442c31c11088e462d44a644a454d48110f0588de830921fd201060ff19612a"}, + {file = "yarl-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:81bad32c8f8b5897c909bf3468bf601f1b855d12f53b6af0271963ee67fff0d2"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f452cc1436151387d3d50533523291d5f77c6bc7913c116eb985304abdbd9ec9"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9cec42a20eae8bebf81e9ce23fb0d0c729fc54cf00643eb251ce7c0215ad49fe"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d959fe96e5c2712c1876d69af0507d98f0b0e8d81bee14cfb3f6737470205419"}, 
+ {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c837ab90c455f3ea8e68bee143472ee87828bff19ba19776e16ff961425b57"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94a993f976cdcb2dc1b855d8b89b792893220db8862d1a619efa7451817c836b"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2442a415a5f4c55ced0fade7b72123210d579f7d950e0b5527fc598866e62c"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fdbf0418489525231723cdb6c79e7738b3cbacbaed2b750cb033e4ea208f220"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b7f6e699304717fdc265a7e1922561b02a93ceffdaefdc877acaf9b9f3080b8"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bcd5bf4132e6a8d3eb54b8d56885f3d3a38ecd7ecae8426ecf7d9673b270de43"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2a93a4557f7fc74a38ca5a404abb443a242217b91cd0c4840b1ebedaad8919d4"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:22b739f99c7e4787922903f27a892744189482125cc7b95b747f04dd5c83aa9f"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2db874dd1d22d4c2c657807562411ffdfabec38ce4c5ce48b4c654be552759dc"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4feaaa4742517eaceafcbe74595ed335a494c84634d33961214b278126ec1485"}, + {file = "yarl-1.13.1-cp312-cp312-win32.whl", hash = "sha256:bbf9c2a589be7414ac4a534d54e4517d03f1cbb142c0041191b729c2fa23f320"}, + {file = "yarl-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:d07b52c8c450f9366c34aa205754355e933922c79135125541daae6cbf31c799"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:95c6737f28069153c399d875317f226bbdea939fd48a6349a3b03da6829fb550"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cd66152561632ed4b2a9192e7f8e5a1d41e28f58120b4761622e0355f0fe034c"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6a2acde25be0cf9be23a8f6cbd31734536a264723fca860af3ae5e89d771cd71"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18595e6a2ee0826bf7dfdee823b6ab55c9b70e8f80f8b77c37e694288f5de1"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a31d21089894942f7d9a8df166b495101b7258ff11ae0abec58e32daf8088813"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45f209fb4bbfe8630e3d2e2052535ca5b53d4ce2d2026bed4d0637b0416830da"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f722f30366474a99745533cc4015b1781ee54b08de73260b2bbe13316079851"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3bf60444269345d712838bb11cc4eadaf51ff1a364ae39ce87a5ca8ad3bb2c8"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:942c80a832a79c3707cca46bd12ab8aa58fddb34b1626d42b05aa8f0bcefc206"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:44b07e1690f010c3c01d353b5790ec73b2f59b4eae5b0000593199766b3f7a5c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:396e59b8de7e4d59ff5507fb4322d2329865b909f29a7ed7ca37e63ade7f835c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3bb83a0f12701c0b91112a11148b5217617982e1e466069d0555be9b372f2734"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c92b89bffc660f1274779cb6fbb290ec1f90d6dfe14492523a0667f10170de26"}, + {file = "yarl-1.13.1-cp313-cp313-win32.whl", hash = "sha256:269c201bbc01d2cbba5b86997a1e0f73ba5e2f471cfa6e226bcaa7fd664b598d"}, + {file = "yarl-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:1d0828e17fa701b557c6eaed5edbd9098eb62d8838344486248489ff233998b8"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8be8cdfe20787e6a5fcbd010f8066227e2bb9058331a4eccddec6c0db2bb85b2"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08d7148ff11cb8e886d86dadbfd2e466a76d5dd38c7ea8ebd9b0e07946e76e4b"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4afdf84610ca44dcffe8b6c22c68f309aff96be55f5ea2fa31c0c225d6b83e23"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0d12fe78dcf60efa205e9a63f395b5d343e801cf31e5e1dda0d2c1fb618073d"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298c1eecfd3257aa16c0cb0bdffb54411e3e831351cd69e6b0739be16b1bdaa8"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c14c16831b565707149c742d87a6203eb5597f4329278446d5c0ae7a1a43928e"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9bacedbb99685a75ad033fd4de37129449e69808e50e08034034c0bf063f99"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658e8449b84b92a4373f99305de042b6bd0d19bf2080c093881e0516557474a5"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:373f16f38721c680316a6a00ae21cc178e3a8ef43c0227f88356a24c5193abd6"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:45d23c4668d4925688e2ea251b53f36a498e9ea860913ce43b52d9605d3d8177"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f7917697bcaa3bc3e83db91aa3a0e448bf5cde43c84b7fc1ae2427d2417c0224"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5989a38ba1281e43e4663931a53fbf356f78a0325251fd6af09dd03b1d676a09"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11b3ca8b42a024513adce810385fcabdd682772411d95bbbda3b9ed1a4257644"}, + {file = "yarl-1.13.1-cp38-cp38-win32.whl", hash = "sha256:dcaef817e13eafa547cdfdc5284fe77970b891f731266545aae08d6cce52161e"}, + {file = "yarl-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:7addd26594e588503bdef03908fc207206adac5bd90b6d4bc3e3cf33a829f57d"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0ae6637b173d0c40b9c1462e12a7a2000a71a3258fa88756a34c7d38926911c"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:576365c9f7469e1f6124d67b001639b77113cfd05e85ce0310f5f318fd02fe85"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78f271722423b2d4851cf1f4fa1a1c4833a128d020062721ba35e1a87154a049"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d74f3c335cfe9c21ea78988e67f18eb9822f5d31f88b41aec3a1ec5ecd32da5"}, + {file = 
"yarl-1.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1891d69a6ba16e89473909665cd355d783a8a31bc84720902c5911dbb6373465"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb382fd7b4377363cc9f13ba7c819c3c78ed97c36a82f16f3f92f108c787cbbf"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8854b9f80693d20cec797d8e48a848c2fb273eb6f2587b57763ccba3f3bd4b"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf2c3f04ff50f16404ce70f822cdc59760e5e2d7965905f0e700270feb2bbfc"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb9f59f3848edf186a76446eb8bcf4c900fe147cb756fbbd730ef43b2e67c6a7"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef9b85fa1bc91c4db24407e7c4da93a5822a73dd4513d67b454ca7064e8dc6a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:098b870c18f1341786f290b4d699504e18f1cd050ed179af8123fd8232513424"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8c723c91c94a3bc8033dd2696a0f53e5d5f8496186013167bddc3fb5d9df46a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44a4c40a6f84e4d5955b63462a0e2a988f8982fba245cf885ce3be7618f6aa7d"}, + {file = "yarl-1.13.1-cp39-cp39-win32.whl", hash = "sha256:84bbcdcf393139f0abc9f642bf03f00cac31010f3034faa03224a9ef0bb74323"}, + {file = "yarl-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:fc2931ac9ce9c61c9968989ec831d3a5e6fcaaff9474e7cfa8de80b7aff5a093"}, + {file = "yarl-1.13.1-py3-none-any.whl", hash = "sha256:6a5185ad722ab4dd52d5fb1f30dcc73282eb1ed494906a92d1a228d3f89607b0"}, + {file = "yarl-1.13.1.tar.gz", hash = "sha256:ec8cfe2295f3e5e44c51f57272afbd69414ae629ec7c6b27f5a410efc78b70a0"}, ] [package.dependencies] @@ -4337,5 +4372,4 @@ torch = ["torch"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" - -content-hash = "04e8c5a6806fa7e3a34f322dd63ba242cef635929360f57d126082edd1d9987c" +content-hash = "0a58b82476e4794adbc5768d38911e1935a9c3941cc40499001199383fc8c8ec" From 751b949c55c6970179d489ce7615d7eddc24411c Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sun, 6 Oct 2024 15:12:00 +0800 Subject: [PATCH 12/40] fix the test --- adalflow/adalflow/utils/lazy_import.py | 1 - 1 file changed, 1 deletion(-) diff --git a/adalflow/adalflow/utils/lazy_import.py b/adalflow/adalflow/utils/lazy_import.py index cf66d383..3451714b 100644 --- a/adalflow/adalflow/utils/lazy_import.py +++ b/adalflow/adalflow/utils/lazy_import.py @@ -43,7 +43,6 @@ class OptionalPackages(Enum): GROQ = ("groq", "Please install groq with: pip install groq") OPENAI = ("openai", "Please install openai with: pip install openai") ANTHROPIC = ("anthropic", "Please install anthropic with: pip install anthropic") - BOTO3 = ("boto3", "Please install boto3 with: pip install boto3") GOOGLE_GENERATIVEAI = ( "google.generativeai", "Please install google-generativeai with: pip install google-generativeai", From 1c8584264d6cb704199206bdb93d7fa78833ef53 Mon Sep 17 00:00:00 2001 From: ajithvcoder Date: Mon, 11 Nov 2024 11:59:50 +0530 Subject: [PATCH 13/40] add modelclient notebook tutorial --- .../tutorials/adalflow_modelclient.ipynb | 2057 +++++++++++++++++ 1 file changed, 2057 insertions(+) create mode 100644 notebooks/tutorials/adalflow_modelclient.ipynb diff --git a/notebooks/tutorials/adalflow_modelclient.ipynb 
b/notebooks/tutorials/adalflow_modelclient.ipynb new file mode 100644 index 00000000..bd77a449 --- /dev/null +++ b/notebooks/tutorials/adalflow_modelclient.ipynb @@ -0,0 +1,2057 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "hGLYrUwBmvUD" + }, + "source": [ + "\n", + " \"Open\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gHK6HFngl6iP" + }, + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The PyTorch library to auto-optimize any LLM task pipelines\n", + "\n", + "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 If you have any questions or concerns, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "# Author\n", + "This notebook was created by community contributor [Ajith](https://github.com/ajithvcoder/).\n", + "\n", + "# Outline\n", + "\n", + "This is a quick introduction to what AdalFlow is capable of. We will cover:\n", + "\n", + "* How to use the model client in sync and async calls\n", + "* How to develop a custom model client using AdalFlow\n", + "\n", + "**Next: Try our [auto-optimization](https://colab.research.google.com/drive/1n3mHUWekTEYHiBdYBTw43TKlPN41A9za?usp=sharing)**\n", + "\n", + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq`, and `faiss` (CPU version) from the extra packages.\n", + "\n", + " ```bash\n", + " pip install adalflow[openai,groq,faiss-cpu]\n", + " ```\n", + "2. 
Set up the `openai` and `groq` API keys in the environment variables" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "nqe-vxB1BCux" + }, + "source": [ + "### Install adalflow" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "ZaaevxNH9JMQ" + }, + "outputs": [], + "source": [ + "# Install adalflow with necessary dependencies\n", + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NGE70aZ8BLuf" + }, + "source": [ + "### Set Environment Variables\n", + "\n", + "Note: Enter your API keys in the cell below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "j2xmGr_99YDq", + "outputId": "5f4ef3fe-3c20-481b-e4f6-93c14af1fd32" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Writing .env\n" + ] + } + ], + "source": [ + "%%writefile .env\n", + "\n", + "OPENAI_API_KEY=\"PASTE-OPENAI_API_KEY_HERE\"\n", + "GROQ_API_KEY=\"PASTE-GROQ_API_KEY-HERE\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "id": "etSUh9KNjmdy" + }, + "outputs": [], + "source": [ + "from adalflow.utils import setup_env\n", + "\n", + "# Load environment variables - Make sure to have OPENAI_API_KEY in the .env file and that .env is present in the current folder\n", + "setup_env(\".env\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ZxBkm77uBZpl" + }, + "source": [ + "### Basic Vanilla Usage Example - model_client() - LLM Chat" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "id": "wOAiKg899Z2u" + }, + "outputs": [], + "source": [ + "from adalflow.components.model_client import OpenAIClient\n", + "from adalflow.core.types import ModelType" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "jv5124_27ioY", + "outputId": "8e593b49-4705-49c0-9501-58cee43831d1" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "api_kwargs: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': 'User: What is the capital of France?\\n'}]}\n", + "response_text: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=16, total_tokens=23), raw_response='The capital of France is Paris.', metadata=None)\n" + ] + } + ], + "source": [ + "# Initialize the OpenAI client for API interactions\n", + "openai_client = OpenAIClient()\n", + "query = \"What is the capital of France?\"\n", + "\n", + "# Set the model type to Large Language Model (LLM)\n", + "model_type = ModelType.LLM\n", + "\n", + "# Construct the prompt by formatting the user's query\n", + "prompt = f\"User: {query}\\n\"\n", + "\n", + "# Configure model parameters:\n", + "# - model: Specifies GPT-3.5-turbo as the model to use\n", + "# - temperature: Controls randomness (0.5 = balanced between deterministic and creative)\n", + "# - max_tokens: Limits the response length to 100 tokens\n", + "model_kwargs = {\"model\": \"gpt-3.5-turbo\", \"temperature\": 0.5, \"max_tokens\": 100}\n", + "\n", + "# Convert the inputs into the format required by OpenAI's API\n", + "api_kwargs = openai_client.convert_inputs_to_api_kwargs(\n", + " input=prompt, model_kwargs=model_kwargs, model_type=model_type\n",
+ ")\n", + "print(f\"api_kwargs: {api_kwargs}\")\n", + "\n", + "\n", + "response = openai_client.call(api_kwargs=api_kwargs, model_type=model_type)\n", + "\n", + "# Extract the text from the chat completion response\n", + "response_text = openai_client.parse_chat_completion(response)\n", + "print(f\"response_text: {response_text}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MBW5viOG9hM8" + }, + "source": [ + "### Basic Vanilla Usage Example - model_client() - Embedding" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "YA4pAIek9ewc", + "outputId": "442d9708-966d-498a-de06-6a4833ba93ac" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['What is the capital of France?', 'What is the capital of France?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.63402575, 0.24025092, 0.42818537, 0.37026355, -0.3518905, -0.041650757, -0.21627253, 0.21798527], index=0), Embedding(embedding=[0.63402575, 0.24025092, 0.42818537, 0.37026355, -0.3518905, -0.041650757, -0.21627253, 0.21798527], index=1)], model='text-embedding-3-small', usage=Usage(prompt_tokens=14, total_tokens=14), error=None, raw_response=None, input=None)\n" + ] + } + ], + "source": [ + "openai_client = OpenAIClient()\n", + "query = \"What is the capital of France?\"\n", + "\n", + "# Specify the model type to be used, setting it to EMBEDDER for embedding functionality\n", + "model_type = ModelType.EMBEDDER\n", + "\n", + "# Create a batch of inputs by duplicating the query; useful for testing batch embedding capabilities\n", + "input = [query] * 2\n", + "\n", + "# Set the model's parameters:\n", + "# - \"text-embedding-3-small\" is the model being used\n", + "# - \"dimensions\" defines the dimensionality of the embeddings\n", + "# - \"encoding_format\" specifies the data format for the embeddings\n", + "model_kwargs = {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 8,\n", + " \"encoding_format\": \"float\",\n", + "}\n", + "\n", + "# Convert the inputs and model parameters to the format expected by the API using OpenAI client's helper method\n", + "api_kwargs = openai_client.convert_inputs_to_api_kwargs(\n", + " input=input, model_kwargs=model_kwargs, model_type=model_type\n", + ")\n", + "print(f\"api_kwargs: {api_kwargs}\")\n", + "\n", + "response = openai_client.call(api_kwargs=api_kwargs, model_type=model_type)\n", + "\n", + "# Parse the embedding response to extract the embeddings in a usable format\n", + "reponse_embedder_output = openai_client.parse_embedding_response(response)\n", + "print(f\"reponse_embedder_output: {reponse_embedder_output}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "k42xTszF34Lx" + }, + "source": [ + "### Adalflow - model_client() - **OpenAI model** Embedding Usage (ModelType.EMBEDDER)" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": { + "id": "NPT6NmaKiKJ9" + }, + "outputs": [], + "source": [ + "from typing import List\n", + "import numpy as np\n", + "from adalflow.core.types import ModelType, EmbedderOutput\n", + "from adalflow.components.model_client import OpenAIClient\n", + "from dataclasses import dataclass\n", + "from enum import Enum\n", + "from numpy.linalg import norm" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": { + 
"id": "Uwtb2sejiN6z" + }, + "outputs": [], + "source": [ + "@dataclass\n", + "class EmbeddingCollection:\n", + " collection: List[float]\n", + " cindex: int\n", + "\n", + "\n", + "@dataclass\n", + "class Usage:\n", + " prompt_tokens: int\n", + " total_tokens: int" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "id": "qyzVv9qviUB1" + }, + "outputs": [], + "source": [ + "openai_client = OpenAIClient()" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "id": "ufpyMmmZicVe" + }, + "outputs": [], + "source": [ + "def get_openai_embedding(text):\n", + " # Set model type to EMBEDDER for embedding functionality\n", + " model_type = ModelType.EMBEDDER\n", + "\n", + " # Prepare input and model-specific parameters\n", + " input = text\n", + " model_kwargs = {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 8,\n", + " \"encoding_format\": \"float\",\n", + " }\n", + "\n", + " # Convert inputs to the required API format\n", + " api_kwargs = openai_client.convert_inputs_to_api_kwargs(\n", + " input=input, model_kwargs=model_kwargs, model_type=model_type\n", + " )\n", + " print(f\"api_kwargs: {api_kwargs}\") # Debug output to verify API arguments\n", + "\n", + " # Call OpenAI API and parse response for embeddings\n", + " response = openai_client.call(api_kwargs=api_kwargs, model_type=model_type)\n", + " reponse_embedder_output = openai_client.parse_embedding_response(response)\n", + " print(\n", + " f\"reponse_embedder_output: {reponse_embedder_output}\"\n", + " ) # Debug output to verify embeddings\n", + " return reponse_embedder_output\n", + "\n", + "\n", + "def process_embeddings(embeddings_collection):\n", + " # Extract embedding data for each item in the collection\n", + " embeddingOutput = [emb.collection for emb in embeddings_collection]\n", + " embeddingDataList = [each_emb_out.data for each_emb_out in embeddingOutput]\n", + " embeddingList = [\n", + " each_item.embedding\n", + " for each_emb_data in embeddingDataList\n", + " for each_item in each_emb_data\n", + " ]\n", + "\n", + " # Convert to numpy array for easier manipulation and calculations\n", + " embeddings_array = np.array(embeddingList)\n", + "\n", + " def calculate_similarity(emb1, emb2):\n", + " # Compute cosine similarity between two embeddings\n", + " return np.dot(emb1, emb2) / (norm(emb1) * norm(emb2))\n", + "\n", + " def get_average_embedding(embeddings_list):\n", + " # Calculate the mean embedding across a list of embeddings\n", + " return np.mean(embeddings_list, axis=0)\n", + "\n", + " def find_nearest_neighbors(\n", + " query_index: int, embedding_list: List[List[float]], k: int = 5\n", + " ):\n", + " # Find top-k most similar embeddings to a query embedding, based on cosine similarity\n", + " query_embedding = embedding_list[query_index]\n", + " similarities = [\n", + " (i, calculate_similarity(query_embedding, emb))\n", + " for i, emb in enumerate(embedding_list)\n", + " if i != query_index\n", + " ]\n", + " return sorted(similarities, key=lambda x: x[1], reverse=True)[:k]\n", + "\n", + " # Return dictionary of functions and processed data for further use\n", + " return {\n", + " \"embeddings_array\": embeddings_array,\n", + " \"calculate_similarity\": calculate_similarity,\n", + " \"average_embedding\": get_average_embedding,\n", + " \"find_nearest_neighbors\": find_nearest_neighbors,\n", + " }\n", + "\n", + "\n", + "# Demonstrate embeddings usage with sample data\n", + "def demonstrate_embeddings_usage(sample_embeddings, 
input_text_list):\n", + " # Initialize processor and retrieve embeddings array\n", + " processor = process_embeddings(sample_embeddings)\n", + " embeddings = processor[\"embeddings_array\"]\n", + "\n", + " print(\"1. Analyzing Semantic Similarities:\")\n", + " print(\"-\" * 50)\n", + "\n", + " # Select a few random indices for similarity testing\n", + " num_indices = 5\n", + " assert len(input_text_list) == len(embeddings)\n", + " indices = np.random.choice(len(input_text_list), num_indices, replace=False)\n", + " selected_text = np.array(input_text_list)[indices]\n", + " selected_embeddings = np.array(embeddings)[indices]\n", + "\n", + " # Display selected texts and their embeddings\n", + " print(\"Selected indices:\", indices)\n", + " print(\"Selected elements from array1:\", selected_text)\n", + " print(\"Selected elements from array2:\", selected_embeddings)\n", + "\n", + " # Calculate similarity between each pair of selected texts\n", + " for i in range(len(selected_text)):\n", + " for j in range(i + 1, len(selected_text)):\n", + " similarity = processor[\"calculate_similarity\"](\n", + " selected_embeddings[i], selected_embeddings[j]\n", + " )\n", + " print(f\"\\nComparing:\\n'{selected_text[i]}' \\nwith:\\n'{selected_text[j]}'\")\n", + " print(f\"Similarity score: {similarity:.4f}\")\n", + "\n", + " print(\"\\n2. Finding Nearest Neighbors:\")\n", + " print(\"-\" * 50)\n", + "\n", + " # Find and display the 3 nearest neighbors for the first text\n", + " query_idx = 0\n", + " neighbors = processor[\"find_nearest_neighbors\"](query_idx, embeddings, k=3)\n", + " print(f\"\\nQuery text: '{input_text_list[query_idx]}'\")\n", + " print(\"\\nNearest neighbors:\")\n", + "\n", + " for idx, similarity in neighbors:\n", + " print(f\"- '{input_text_list[idx]}' (similarity: {similarity:.4f})\")\n", + "\n", + " print(\"\\n3. 
Using Average Embeddings:\")\n", + " print(\"-\" * 50)\n", + "\n", + " # Calculate and compare the average embedding for texts containing \"Paris\"\n", + " paris_indices = [i for i, text in enumerate(input_text_list) if \"Paris\" in text]\n", + " paris_embeddings = embeddings[paris_indices]\n", + " avg_paris_embedding = processor[\"average_embedding\"](paris_embeddings)\n", + "\n", + " print(\"\\nComparing average 'Paris' embedding with all texts:\")\n", + " for i, text in enumerate(input_text_list):\n", + " similarity = processor[\"calculate_similarity\"](\n", + " avg_paris_embedding, embeddings[i]\n", + " )\n", + " print(f\"- '{text}' (similarity: {similarity:.4f})\")" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": { + "id": "EuvZkL2kifvF" + }, + "outputs": [], + "source": [ + "def run_model_client_embedding_usage():\n", + " # Define a set of sample texts to test embedding and similarity functionalities\n", + " sample_texts = [\n", + " \"What is the capital of France?\",\n", + " \"Paris is the capital of France.\",\n", + " \"What is the population of France?\",\n", + " \"How big is Paris?\",\n", + " \"What is the weather like in Paris?\",\n", + " ]\n", + "\n", + " # Duplicate each sample text to form an input list with repeated entries (for embedding testing)\n", + " input_text_list = [text for text in sample_texts for _ in range(2)]\n", + "\n", + " # Generate embeddings for each text in the input list, and store them in an EmbeddingCollection\n", + " embeddings_collection = [\n", + " EmbeddingCollection(collection=get_openai_embedding(text), cindex=i)\n", + " for i, text in enumerate(input_text_list)\n", + " ]\n", + " print(\n", + " embeddings_collection\n", + " ) # Debugging output to verify embeddings collection content\n", + "\n", + " # Demonstrate the usage of embeddings by analyzing similarities, finding neighbors, etc.\n", + " demonstrate_embeddings_usage(embeddings_collection, input_text_list)" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "M9EpJd_7izCf", + "outputId": "ed1d938c-da36-4d1d-8cdf-5449047af403" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['What is the capital of France?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.63402575, 0.24025092, 0.42818537, 0.37026355, -0.3518905, -0.041650757, -0.21627253, 0.21798527], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['What is the capital of France?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.63402575, 0.24025092, 0.42818537, 0.37026355, -0.3518905, -0.041650757, -0.21627253, 0.21798527], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['Paris is the capital of France.']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.32851914, 0.31952682, -0.22016178, -0.34650376, -0.31563017, 0.49667537, -0.3447053, 0.395362], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, 
total_tokens=7), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['Paris is the capital of France.']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.32851914, 0.31952682, -0.22016178, -0.34650376, -0.31563017, 0.49667537, -0.3447053, 0.395362], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['What is the population of France?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.69818175, 0.33034775, 0.48146424, -0.041622937, -0.38599142, 0.13369127, -0.011028064, 0.05374008], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['What is the population of France?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.69818175, 0.33034775, 0.48146424, -0.041622937, -0.38599142, 0.13369127, -0.011028064, 0.05374008], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['How big is Paris?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.13988405, -0.35962427, 0.14219283, 0.0681765, -0.51662034, -0.116185255, -0.44545603, -0.58941436], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=5, total_tokens=5), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['How big is Paris?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[0.13988405, -0.35962427, 0.14219283, 0.0681765, -0.51662034, -0.116185255, -0.44545603, -0.58941436], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=5, total_tokens=5), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['What is the weather like in Paris?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[-0.16997108, -0.14322221, -0.6407088, -0.16881266, -0.08045719, -0.20030048, -0.021483332, -0.6815693], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=8, total_tokens=8), error=None, raw_response=None, input=None)\n", + "api_kwargs: {'model': 'text-embedding-3-small', 'dimensions': 8, 'encoding_format': 'float', 'input': ['What is the weather like in Paris?']}\n", + "reponse_embedder_output: EmbedderOutput(data=[Embedding(embedding=[-0.16997108, -0.14322221, -0.6407088, -0.16881266, -0.08045719, -0.20030048, -0.021483332, -0.6815693], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=8, total_tokens=8), error=None, raw_response=None, input=None)\n", + "[EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.63402575, 0.24025092, 0.42818537, 0.37026355, -0.3518905, -0.041650757, -0.21627253, 0.21798527], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None), cindex=0), 
EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.63402575, 0.24025092, 0.42818537, 0.37026355, -0.3518905, -0.041650757, -0.21627253, 0.21798527], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None), cindex=1), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.32851914, 0.31952682, -0.22016178, -0.34650376, -0.31563017, 0.49667537, -0.3447053, 0.395362], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None), cindex=2), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.32851914, 0.31952682, -0.22016178, -0.34650376, -0.31563017, 0.49667537, -0.3447053, 0.395362], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None), cindex=3), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.69818175, 0.33034775, 0.48146424, -0.041622937, -0.38599142, 0.13369127, -0.011028064, 0.05374008], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None), cindex=4), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.69818175, 0.33034775, 0.48146424, -0.041622937, -0.38599142, 0.13369127, -0.011028064, 0.05374008], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None), cindex=5), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.13988405, -0.35962427, 0.14219283, 0.0681765, -0.51662034, -0.116185255, -0.44545603, -0.58941436], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=5, total_tokens=5), error=None, raw_response=None, input=None), cindex=6), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[0.13988405, -0.35962427, 0.14219283, 0.0681765, -0.51662034, -0.116185255, -0.44545603, -0.58941436], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=5, total_tokens=5), error=None, raw_response=None, input=None), cindex=7), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[-0.16997108, -0.14322221, -0.6407088, -0.16881266, -0.08045719, -0.20030048, -0.021483332, -0.6815693], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=8, total_tokens=8), error=None, raw_response=None, input=None), cindex=8), EmbeddingCollection(collection=EmbedderOutput(data=[Embedding(embedding=[-0.16997108, -0.14322221, -0.6407088, -0.16881266, -0.08045719, -0.20030048, -0.021483332, -0.6815693], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=8, total_tokens=8), error=None, raw_response=None, input=None), cindex=9)]\n", + "1. Analyzing Semantic Similarities:\n", + "--------------------------------------------------\n", + "Selected indices: [5 6 4 2 0]\n", + "Selected elements from array1: ['What is the population of France?' 'How big is Paris?'\n", + " 'What is the population of France?' 
'Paris is the capital of France.'\n", + " 'What is the capital of France?']\n", + "Selected elements from array2: [[ 0.69818175 0.33034775 0.48146424 -0.04162294 -0.38599142 0.13369127\n", + " -0.01102806 0.05374008]\n", + " [ 0.13988405 -0.35962427 0.14219283 0.0681765 -0.51662034 -0.11618526\n", + " -0.44545603 -0.58941436]\n", + " [ 0.69818175 0.33034775 0.48146424 -0.04162294 -0.38599142 0.13369127\n", + " -0.01102806 0.05374008]\n", + " [ 0.32851914 0.31952682 -0.22016178 -0.34650376 -0.31563017 0.49667537\n", + " -0.3447053 0.395362 ]\n", + " [ 0.63402575 0.24025092 0.42818537 0.37026355 -0.3518905 -0.04165076\n", + " -0.21627253 0.21798527]]\n", + "\n", + "Comparing:\n", + "'What is the population of France?' \n", + "with:\n", + "'How big is Paris?'\n", + "Similarity score: 0.2016\n", + "\n", + "Comparing:\n", + "'What is the population of France?' \n", + "with:\n", + "'What is the population of France?'\n", + "Similarity score: 1.0000\n", + "\n", + "Comparing:\n", + "'What is the population of France?' \n", + "with:\n", + "'Paris is the capital of France.'\n", + "Similarity score: 0.4566\n", + "\n", + "Comparing:\n", + "'What is the population of France?' \n", + "with:\n", + "'What is the capital of France?'\n", + "Similarity score: 0.8571\n", + "\n", + "Comparing:\n", + "'How big is Paris?' \n", + "with:\n", + "'What is the population of France?'\n", + "Similarity score: 0.2016\n", + "\n", + "Comparing:\n", + "'How big is Paris?' \n", + "with:\n", + "'Paris is the capital of France.'\n", + "Similarity score: -0.0980\n", + "\n", + "Comparing:\n", + "'How big is Paris?' \n", + "with:\n", + "'What is the capital of France?'\n", + "Similarity score: 0.2429\n", + "\n", + "Comparing:\n", + "'What is the population of France?' \n", + "with:\n", + "'Paris is the capital of France.'\n", + "Similarity score: 0.4566\n", + "\n", + "Comparing:\n", + "'What is the population of France?' \n", + "with:\n", + "'What is the capital of France?'\n", + "Similarity score: 0.8571\n", + "\n", + "Comparing:\n", + "'Paris is the capital of France.' \n", + "with:\n", + "'What is the capital of France?'\n", + "Similarity score: 0.3136\n", + "\n", + "2. Finding Nearest Neighbors:\n", + "--------------------------------------------------\n", + "\n", + "Query text: 'What is the capital of France?'\n", + "\n", + "Nearest neighbors:\n", + "- 'What is the capital of France?' (similarity: 1.0000)\n", + "- 'What is the population of France?' (similarity: 0.8571)\n", + "- 'What is the population of France?' (similarity: 0.8571)\n", + "\n", + "3. Using Average Embeddings:\n", + "--------------------------------------------------\n", + "\n", + "Comparing average 'Paris' embedding with all texts:\n", + "- 'What is the capital of France?' (similarity: -0.0168)\n", + "- 'What is the capital of France?' (similarity: -0.0168)\n", + "- 'Paris is the capital of France.' (similarity: 0.3752)\n", + "- 'Paris is the capital of France.' (similarity: 0.3752)\n", + "- 'What is the population of France?' (similarity: 0.0897)\n", + "- 'What is the population of France?' (similarity: 0.0897)\n", + "- 'How big is Paris?' (similarity: 0.7366)\n", + "- 'How big is Paris?' (similarity: 0.7366)\n", + "- 'What is the weather like in Paris?' (similarity: 0.6574)\n", + "- 'What is the weather like in Paris?' 
(similarity: 0.6574)\n" + ] + } + ], + "source": [ + "run_model_client_embedding_usage()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "kOKN7jZAkZz0" + }, + "source": [ + "### Adalflow - model_client() - **OpenAI model** LLM Multichat Usage (ModelType.LLM)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "id": "VIQaK1dmkHiJ" + }, + "outputs": [], + "source": [ + "from adalflow.components.model_client import OpenAIClient\n", + "from adalflow.core.types import ModelType\n", + "from adalflow.utils import setup_env\n", + "from typing import List, Dict" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "id": "BRRfPHh4kHY9" + }, + "outputs": [], + "source": [ + "class ChatConversation:\n", + " def __init__(self):\n", + " # Initialize the OpenAI client for managing API calls\n", + " self.openai_client = OpenAIClient()\n", + " # Initialize an empty conversation history to store chat messages\n", + " self.conversation_history: str = \"\"\n", + " # Model parameters to customize the API call\n", + " self.model_kwargs = {\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"temperature\": 0.5, # Controls randomness; 0.5 for balanced responses\n", + " \"max_tokens\": 100, # Limits the response length\n", + " }\n", + "\n", + " def add_user_message(self, message: str):\n", + " \"\"\"Add a user message to the conversation history\"\"\"\n", + " self.conversation_history += (\n", + " f\" {message} \" # Format for user message\n", + " )\n", + "\n", + " def add_assistant_message(self, message: str):\n", + " \"\"\"Add an assistant message to the conversation history\"\"\"\n", + " self.conversation_history += (\n", + " f\" {message} \" # Format for assistant message\n", + " )\n", + "\n", + " def get_response(self) -> str:\n", + " \"\"\"Get response from the model based on conversation history\"\"\"\n", + " # Convert the conversation history and model parameters into API arguments\n", + " api_kwargs = self.openai_client.convert_inputs_to_api_kwargs(\n", + " input=self.conversation_history,\n", + " model_kwargs=self.model_kwargs,\n", + " model_type=ModelType.LLM,\n", + " )\n", + " print(f\"api_kwargs: {api_kwargs}\") # Debugging output to verify API parameters\n", + "\n", + " # Call the API with the generated arguments to get a response\n", + " response = self.openai_client.call(\n", + " api_kwargs=api_kwargs, model_type=ModelType.LLM\n", + " )\n", + " print(\"response: \", response) # Debugging output for raw API response\n", + "\n", + " # Extract and parse the text response from the API output\n", + " response_text = self.openai_client.parse_chat_completion(response)\n", + " # Update conversation history with the assistant's response\n", + " self.add_assistant_message(response_text)\n", + " return response_text # Return the assistant's response to the caller" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "id": "9HWtlBnZkHLU" + }, + "outputs": [], + "source": [ + "def check_chat_conversation():\n", + " # Initialize a new chat conversation\n", + " chat = ChatConversation()\n", + "\n", + " # Example list of user questions to simulate a multi-turn conversation\n", + " questions = [\n", + " \"What is the capital of France?\",\n", + " \"What is its population?\",\n", + " \"Tell me about its famous landmarks\",\n", + " ]\n", + "\n", + " # Iterate through each question in the list\n", + " for question in questions:\n", + " print(f\"\\nUser: {question}\") # Display the user's question\n", + " 
chat.add_user_message(\n", + " question\n", + " ) # Add the user question to the conversation history\n", + "\n", + " response = (\n", + " chat.get_response()\n", + " ) # Get assistant's response based on conversation history\n", + " print(f\"Assistant: {response}\") # Display the assistant's response\n", + "\n", + " # Display the full conversation history after all exchanges\n", + " print(\"\\nFull Conversation History:\")\n", + " print(chat.conversation_history) # Print the accumulated conversation history" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "mLFopVh0lJJh", + "outputId": "eb6d555e-1562-4c0c-ad94-57044a853eb4" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "User: What is the capital of France?\n", + "api_kwargs: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': ' What is the capital of France? '}]}\n", + "response: ChatCompletion(id='chatcmpl-ASHotWDnw55BOd5d3zWzs0ucxztJr', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The capital of France is Paris.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305047, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=20, total_tokens=27, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=20, total_tokens=27), raw_response='The capital of France is Paris.', metadata=None)\n", + "\n", + "User: What is its population?\n", + "api_kwargs: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': \" What is the capital of France? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=20, total_tokens=27), raw_response='The capital of France is Paris.', metadata=None) What is its population? 
\"}]}\n", + "response: ChatCompletion(id='chatcmpl-ASHouXidu63zZHiV9uFZ1rH5SFNYj', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The population of Paris, the capital of France, is approximately 2.2 million people.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305048, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=19, prompt_tokens=82, total_tokens=101, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=19, prompt_tokens=82, total_tokens=101), raw_response='The population of Paris, the capital of France, is approximately 2.2 million people.', metadata=None)\n", + "\n", + "User: Tell me about its famous landmarks\n", + "api_kwargs: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': \" What is the capital of France? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=20, total_tokens=27), raw_response='The capital of France is Paris.', metadata=None) What is its population? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=19, prompt_tokens=82, total_tokens=101), raw_response='The population of Paris, the capital of France, is approximately 2.2 million people.', metadata=None) Tell me about its famous landmarks \"}]}\n", + "response: ChatCompletion(id='chatcmpl-ASHovLb6YpzmwJ8Noc90GdeLpvvrW', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Paris, the capital of France, is known for its iconic landmarks such as the Eiffel Tower, Notre-Dame Cathedral, Louvre Museum, Champs-Élysées, and Arc de Triomphe. These landmarks attract millions of tourists from around the world each year and are symbols of the city's rich history and cultural heritage.\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305049, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=69, prompt_tokens=157, total_tokens=226, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=69, prompt_tokens=157, total_tokens=226), raw_response=\"Paris, the capital of France, is known for its iconic landmarks such as the Eiffel Tower, Notre-Dame Cathedral, Louvre Museum, Champs-Élysées, and Arc de Triomphe. These landmarks attract millions of tourists from around the world each year and are symbols of the city's rich history and cultural heritage.\", metadata=None)\n", + "\n", + "Full Conversation History:\n", + " What is the capital of France? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=20, total_tokens=27), raw_response='The capital of France is Paris.', metadata=None) What is its population? 
GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=19, prompt_tokens=82, total_tokens=101), raw_response='The population of Paris, the capital of France, is approximately 2.2 million people.', metadata=None) Tell me about its famous landmarks GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=69, prompt_tokens=157, total_tokens=226), raw_response=\"Paris, the capital of France, is known for its iconic landmarks such as the Eiffel Tower, Notre-Dame Cathedral, Louvre Museum, Champs-Élysées, and Arc de Triomphe. These landmarks attract millions of tourists from around the world each year and are symbols of the city's rich history and cultural heritage.\", metadata=None) \n"
     ]
    }
   ],
   "source": [
    "# You can observe that each question depends on the previous question, and the chat responds appropriately\n",
    "check_chat_conversation()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "BhD8AJWyldK-"
   },
   "source": [
    "### Adalflow - model_client() - **OpenAI model** LLM Multichat Usage (ModelType.LLM) - asynchronous (async())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "id": "AUjjiCnulcF8"
   },
   "outputs": [],
   "source": [
    "import asyncio\n",
    "from adalflow.components.model_client import OpenAIClient\n",
    "from adalflow.core.types import ModelType\n",
    "from typing import List"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "id": "Z5i9_q3vlo3C"
   },
   "outputs": [],
   "source": [
    "class ChatConversationAsync:\n",
    "    def __init__(self):\n",
    "        # Initialize with an asynchronous OpenAI client\n",
    "        self.openai_client = OpenAIClient()\n",
    "\n",
    "        # Default model parameters for the chat\n",
    "        self.model_kwargs = {\n",
    "            \"model\": \"gpt-3.5-turbo\",  # Model used for chat\n",
    "            \"temperature\": 0.5,  # Controls randomness in response\n",
    "            \"max_tokens\": 100,  # Maximum tokens in the generated response\n",
    "        }\n",
    "\n",
    "    async def get_response(self, message: str) -> str:\n",
    "        \"\"\"Asynchronously get a response from the model for a given user message\"\"\"\n",
    "\n",
    "        # Convert input message and model parameters into the format expected by the API\n",
    "        api_kwargs = self.openai_client.convert_inputs_to_api_kwargs(\n",
    "            input=message,  # User's message input\n",
    "            model_kwargs=self.model_kwargs,  # Model-specific settings\n",
    "            model_type=ModelType.LLM,  # Specify the model type as a language model (LLM)\n",
    "        )\n",
    "        print(f\"api_kwargs: {api_kwargs}\")  # Log the API arguments for debugging\n",
    "\n",
    "        # Make an asynchronous API call to OpenAI's model\n",
    "        response = await self.openai_client.acall(\n",
    "            api_kwargs=api_kwargs,  # Pass the prepared arguments\n",
    "            model_type=ModelType.LLM,  # Specify the model type again\n",
    "        )\n",
    "        print(\"response: \", response)  # Print the raw response from the API\n",
    "\n",
    "        # Parse the API response to extract the assistant's reply (chat completion)\n",
    "        response_text = self.openai_client.parse_chat_completion(response)\n",
    "        return response_text  # Return the parsed response text"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "id": "nNVmwsO2lrWX"
   },
   "outputs": [],
   "source": [
    "async def check_chat_conversations_async():\n",
    "    # Create an instance of ChatConversationAsync to handle asynchronous operations\n",
    "    chat = ChatConversationAsync()\n",
    "\n",
    "    # List of unrelated 
questions that will be handled in parallel\n", + " questions = [\n", + " \"What is the capital of France?\", # Question 1\n", + " \"Is dog a wild animal?\", # Question 2\n", + " \"Tell me about amazon forest\", # Question 3\n", + " ]\n", + "\n", + " # Create a list of asynchronous tasks, one for each question\n", + " # Each task calls the get_response method asynchronously for a question\n", + " tasks = [chat.get_response(question) for question in questions]\n", + "\n", + " # Gather the results of all asynchronous tasks concurrently\n", + " responses = await asyncio.gather(*tasks)\n", + "\n", + " # Print the responses from the assistant along with the respective user questions\n", + " for question, response in zip(questions, responses):\n", + " print(f\"\\nUser: {question}\")\n", + " print(f\"Assistant: {response}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "T8hdUjjeltVo", + "outputId": "53dc0385-afb6-4268-c3cc-dde9385b565e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "api_kwargs: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': 'What is the capital of France?'}]}\n", + "api_kwargs: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': 'Is dog a wild animal?'}]}\n", + "api_kwargs: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': 'Tell me about amazon forest'}]}\n", + "response: ChatCompletion(id='chatcmpl-ASHqEOWoBOIiulzd0aoXeyKKb9npb', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The capital of France is Paris.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305130, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=14, total_tokens=21, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "response: ChatCompletion(id='chatcmpl-ASHqE6WAmS4wnRMdD20PdjsdyOcuP', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Dogs were originally domesticated from wolves thousands of years ago, and they are now considered domestic animals. While they share some characteristics with wild animals, such as hunting instincts and pack behavior, dogs have been bred and trained by humans for companionship and various roles, such as working dogs, service animals, and pets. 
So, in general, dogs are not considered wild animals.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305130, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=76, prompt_tokens=13, total_tokens=89, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "response: ChatCompletion(id='chatcmpl-ASHqEexoH4z6uZsDkoRwwmyFuoSyZ', choices=[Choice(finish_reason='length', index=0, logprobs=None, message=ChatCompletionMessage(content='The Amazon Rainforest, also known as the Amazon Jungle, is a vast and dense tropical rainforest that covers much of the Amazon Basin in South America. It is the largest rainforest in the world, spanning over 5.5 million square kilometers (2.1 million square miles) across nine countries, including Brazil, Peru, Colombia, and Venezuela.\\n\\nThe Amazon Rainforest is home to an incredibly diverse array of plant and animal species, many of which are found nowhere else on Earth. It', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305130, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=12, total_tokens=112, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "\n", + "User: What is the capital of France?\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=14, total_tokens=21), raw_response='The capital of France is Paris.', metadata=None)\n", + "\n", + "User: Is dog a wild animal?\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=76, prompt_tokens=13, total_tokens=89), raw_response='Dogs were originally domesticated from wolves thousands of years ago, and they are now considered domestic animals. While they share some characteristics with wild animals, such as hunting instincts and pack behavior, dogs have been bred and trained by humans for companionship and various roles, such as working dogs, service animals, and pets. So, in general, dogs are not considered wild animals.', metadata=None)\n", + "\n", + "User: Tell me about amazon forest\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=12, total_tokens=112), raw_response='The Amazon Rainforest, also known as the Amazon Jungle, is a vast and dense tropical rainforest that covers much of the Amazon Basin in South America. It is the largest rainforest in the world, spanning over 5.5 million square kilometers (2.1 million square miles) across nine countries, including Brazil, Peru, Colombia, and Venezuela.\\n\\nThe Amazon Rainforest is home to an incredibly diverse array of plant and animal species, many of which are found nowhere else on Earth. 
It', metadata=None)\n" + ] + } + ], + "source": [ + "# Run the asynchronous function if in a file\n", + "# asyncio.run(check_chat_conversations_async())\n", + "\n", + "# in jupyter notebook\n", + "await check_chat_conversations_async()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "_VvhvKmimfIt" + }, + "source": [ + "### Adalflow - model_client() - **OpenAI model** LLM Multichat Usage (ModelType.LLM) - Benchmark sync() vs async()" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "id": "tMT0BsaRmt6z" + }, + "outputs": [], + "source": [ + "import asyncio\n", + "import time\n", + "from adalflow.components.model_client import (\n", + " OpenAIClient,\n", + ") # Assuming OpenAIClient with .call() and .acall() is available\n", + "from adalflow.core.types import ModelType" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "id": "QrzYgdsEm9sz" + }, + "outputs": [], + "source": [ + "# Initialize the OpenAI client\n", + "openai_client = OpenAIClient()\n", + "\n", + "# Sample prompt for testing\n", + "prompt = \"Tell me a joke.\"\n", + "\n", + "model_kwargs = {\"model\": \"gpt-3.5-turbo\", \"temperature\": 0.5, \"max_tokens\": 100}" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "id": "amdQNSmvnB-L" + }, + "outputs": [], + "source": [ + "# Synchronous function for benchmarking .call()\n", + "def benchmark_sync_call(api_kwargs, runs=10):\n", + " \"\"\"\n", + " Benchmark the synchronous .call() method by running it multiple times.\n", + "\n", + " Parameters:\n", + " - api_kwargs: The arguments to be passed to the API call\n", + " - runs: The number of times to run the call (default is 10)\n", + " \"\"\"\n", + " # List to store responses\n", + " responses = []\n", + "\n", + " # Record the start time of the benchmark\n", + " start_time = time.time()\n", + "\n", + " # Perform synchronous API calls for the specified number of runs\n", + " responses = [\n", + " openai_client.call(\n", + " api_kwargs=api_kwargs, # API arguments\n", + " model_type=ModelType.LLM, # Model type (e.g., LLM for language models)\n", + " )\n", + " for _ in range(runs) # Repeat 'runs' times\n", + " ]\n", + "\n", + " # Record the end time after all calls are completed\n", + " end_time = time.time()\n", + "\n", + " # Output the results of each synchronous call\n", + " for i, response in enumerate(responses):\n", + " print(f\"sync call {i + 1} completed: {response}\")\n", + "\n", + " # Print the total time taken for all synchronous calls\n", + " print(f\"\\nSynchronous benchmark completed in {end_time - start_time:.2f} seconds\")\n", + "\n", + "\n", + "# Asynchronous function for benchmarking .acall()\n", + "async def benchmark_async_acall(api_kwargs, runs=10):\n", + " \"\"\"\n", + " Benchmark the asynchronous .acall() method by running it multiple times concurrently.\n", + "\n", + " Parameters:\n", + " - api_kwargs: The arguments to be passed to the API call\n", + " - runs: The number of times to run the asynchronous call (default is 10)\n", + " \"\"\"\n", + " # Record the start time of the benchmark\n", + " start_time = time.time()\n", + "\n", + " # Create a list of asynchronous tasks for the specified number of runs\n", + " tasks = [\n", + " openai_client.acall(\n", + " api_kwargs=api_kwargs, # API arguments\n", + " model_type=ModelType.LLM, # Model type (e.g., LLM for language models)\n", + " )\n", + " for _ in range(runs) # Repeat 'runs' times\n", + " ]\n", + "\n", + " # Execute all tasks concurrently and wait for them to 
finish\n", + " responses = await asyncio.gather(*tasks)\n", + "\n", + " # Record the end time after all tasks are completed\n", + " end_time = time.time()\n", + "\n", + " # Output the results of each asynchronous call\n", + " for i, response in enumerate(responses):\n", + " print(f\"Async call {i + 1} completed: {response}\")\n", + "\n", + " # Print the total time taken for all asynchronous calls\n", + " print(f\"\\nAsynchronous benchmark completed in {end_time - start_time:.2f} seconds\")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "FNmiGG_bnD7Q", + "outputId": "242071e3-18e1-44aa-c99a-8fac71fbb84c" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting synchronous benchmark...\n", + "\n", + "sync call 1 completed: ChatCompletion(id='chatcmpl-ASHqYcxCVNAnLlsrnRvxh5cRrQOwf', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself? Because it was two-tired!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305150, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 2 completed: ChatCompletion(id='chatcmpl-ASHqZz3G3jqGlHtKjoO9mbYjjS1Af', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the scarecrow win an award? Because he was outstanding in his field!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305151, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 3 completed: ChatCompletion(id='chatcmpl-ASHqZJmWUUDSrVElavHZOCuvSNQ8q', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305151, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 4 completed: ChatCompletion(id='chatcmpl-ASHqdMDEfY4pVAAom6RbmvnsBYdc1', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself? 
Because it was two-tired!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305155, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 5 completed: ChatCompletion(id='chatcmpl-ASHqdrGYZofAuDFGyAPq7mPsub78v', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why don't scientists trust atoms?\\n\\nBecause they make up everything!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305155, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=13, prompt_tokens=12, total_tokens=25, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 6 completed: ChatCompletion(id='chatcmpl-ASHqerqL9a6ev9YRNaSzy3mskQOmY', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the scarecrow win an award? Because he was outstanding in his field!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305156, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 7 completed: ChatCompletion(id='chatcmpl-ASHqeQq3iPrHepIld9SSg2Pzsxc4N', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself? Because it was two tired!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305156, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=12, total_tokens=28, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 8 completed: ChatCompletion(id='chatcmpl-ASHqfD6yeMEwZ6StOT8Ncd00R3No1', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself? 
Because it was two tired!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305157, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=12, total_tokens=28, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 9 completed: ChatCompletion(id='chatcmpl-ASHqgl57WJILsEu4PUj59MayzYnZr', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why don't scientists trust atoms? \\n\\nBecause they make up everything!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305158, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=14, prompt_tokens=12, total_tokens=26, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "sync call 10 completed: ChatCompletion(id='chatcmpl-ASHqgoVKX9legJ43xcSkLR4kgRxTn', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why was the math book sad?\\n\\nBecause it had too many problems.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305158, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=14, prompt_tokens=12, total_tokens=26, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "\n", + "Synchronous benchmark completed in 8.77 seconds\n", + "\n", + "Starting asynchronous benchmark...\n", + "\n", + "Async call 1 completed: ChatCompletion(id='chatcmpl-ASHqhKMKo8PMbdyiYpNHBQW4oU3J7', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself? 
Because it was two tired!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=12, total_tokens=28, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 2 completed: ChatCompletion(id='chatcmpl-ASHqhWwUpBXRbgKstjV6qei7FzgfG', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why don't scientists trust atoms?\\n\\nBecause they make up everything!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=13, prompt_tokens=12, total_tokens=25, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 3 completed: ChatCompletion(id='chatcmpl-ASHqhgTqUmUh5FW2nwTyRLagoKMQ5', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the scarecrow win an award? Because he was outstanding in his field!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 4 completed: ChatCompletion(id='chatcmpl-ASHqhYzaxguL4P2MDG1AakTiGMIyg', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the scarecrow win an award? Because he was outstanding in his field!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 5 completed: ChatCompletion(id='chatcmpl-ASHqhsdbfpywUP4KBhqPvUNOcOm1x', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the scarecrow win an award? 
Because he was outstanding in his field!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 6 completed: ChatCompletion(id='chatcmpl-ASHqhqdaOKxe5zjf4vpKZAFbH8x5n', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself? Because it was two tired!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=12, total_tokens=28, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 7 completed: ChatCompletion(id='chatcmpl-ASHqhrXadr2Tf62QM4SAXjLg8iSql', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the golfer bring two pairs of pants? In case he got a hole in one!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=20, prompt_tokens=12, total_tokens=32, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 8 completed: ChatCompletion(id='chatcmpl-ASHqh8mQiGew9qwCOY5UgUilx2SYL', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the scarecrow win an award? 
Because he was outstanding in his field!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 9 completed: ChatCompletion(id='chatcmpl-ASHqhNWUNOnSj9LLE7utDW0wz7USX', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why don't scientists trust atoms?\\n\\nBecause they make up everything!\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=13, prompt_tokens=12, total_tokens=25, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "Async call 10 completed: ChatCompletion(id='chatcmpl-ASHqhX5u0K2xFoFxyhebnOI9WsT0l', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Why did the scarecrow win an award? Because he was outstanding in his field!', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305159, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=12, total_tokens=29, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "\n", + "Asynchronous benchmark completed in 0.75 seconds\n" + ] + } + ], + "source": [ + "api_kwargs = openai_client.convert_inputs_to_api_kwargs(\n", + " input=prompt, model_kwargs=model_kwargs, model_type=ModelType.LLM\n", + ")\n", + "\n", + "# Run both benchmarks\n", + "print(\"Starting synchronous benchmark...\\n\")\n", + "benchmark_sync_call(api_kwargs)\n", + "\n", + "print(\"\\nStarting asynchronous benchmark...\\n\")\n", + "await benchmark_async_acall(api_kwargs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QtbUd2K-nPaL" + }, + "source": [ + "### Adalflow - model_client() - **OpenAI model** LLM Multichat Usage (ModelType.LLM) - Additional Utils -\n", + "- get_first_message_content()\n", + "- get_all_messages_content()\n", + "- get_probabilities()" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "id": "ghyzD7tynO4A" + }, + "outputs": [], + "source": [ + "from adalflow.components.model_client import OpenAIClient\n", + "from adalflow.core.types import ModelType\n", + "from adalflow.utils import setup_env\n", + "from adalflow.components.model_client.openai_client import (\n", + " get_first_message_content,\n", + " get_all_messages_content,\n", + " get_probabilities,\n", + ")\n", + "from adalflow.core import Generator" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "id": "QAaOFTZVn4Yx" + }, + "outputs": [], + "source": [ + "def 
check_openai_additional_utils(func, model_kwargs):\n",
    "    \"\"\"\n",
    "    This function demonstrates the usage of the OpenAI client with a custom utility function\n",
    "    for generating a response from the LLM based on the given query.\n",
    "\n",
    "    Parameters:\n",
    "    - func: A function that will be used to parse the chat completion (for custom parsing).\n",
    "    - model_kwargs: The additional model parameters (e.g., temperature, max_tokens) to be used in the model.\n",
    "\n",
    "    Returns:\n",
    "    - output: The generated response from the model based on the query.\n",
    "    \"\"\"\n",
    "\n",
    "    # Initialize the OpenAI client with a custom chat completion parser\n",
    "    openai_client = OpenAIClient(chat_completion_parser=func)\n",
    "\n",
    "    # Define a sample query (user question)\n",
    "    query = \"What is the capital of France?\"\n",
    "\n",
    "    # Set the model type to LLM (not passed below; the Generator handles the LLM call itself)\n",
    "    model_type = ModelType.LLM\n",
    "\n",
    "    # Format the user query as a conversation prompt (illustrative only; the Generator builds its prompt from prompt_kwargs)\n",
    "    prompt = f\"User: {query}\\n\"\n",
    "\n",
    "    # Define any additional parameters needed for the model (e.g., the input string)\n",
    "    prompt_kwargs = {\n",
    "        \"input_str\": \"What is the capital of France?\",\n",
    "    }\n",
    "\n",
    "    # Initialize the Generator with the OpenAI client and model parameters\n",
    "    generator = Generator(model_client=openai_client, model_kwargs=model_kwargs)\n",
    "\n",
    "    # Execute the generator to get a response for the prompt (using the defined prompt_kwargs)\n",
    "    output = generator(prompt_kwargs=prompt_kwargs)\n",
    "\n",
    "    # Return the generated output (response from the LLM)\n",
    "    return output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "id": "hNnBpFjkoXil"
   },
   "outputs": [],
   "source": [
    "def run_utils_functions():\n",
    "    \"\"\"\n",
    "    This function runs a series of utility functions using different model\n",
    "    configurations for generating responses. 
It demonstrates how to check\n", + " OpenAI model outputs using various utility functions.\n", + " \"\"\"\n", + "\n", + " # Define the model arguments for the probability-based function (with logprobs)\n", + " probability_model_kwargs = {\n", + " \"model\": \"gpt-3.5-turbo\", # Specify the model version\n", + " \"logprobs\": True, # Enable logprobs to get probability distributions for tokens\n", + " \"n\": 2, # Request 2 different completions for each query\n", + " }\n", + "\n", + " # Define general model arguments for most other functions\n", + " model_kwargs = {\n", + " \"model\": \"gpt-3.5-turbo\", # Specify the model version\n", + " \"temperature\": 0.5, # Control the randomness of responses (0 is deterministic)\n", + " \"max_tokens\": 100, # Set the maximum number of tokens (words) in the response\n", + " }\n", + "\n", + " # List of functions to run with corresponding model arguments\n", + " func_list = [\n", + " [\n", + " get_probabilities,\n", + " probability_model_kwargs,\n", + " ], # Function to get probabilities with specific kwargs\n", + " [\n", + " get_first_message_content,\n", + " model_kwargs,\n", + " ], # Function to get first message content\n", + " [\n", + " get_all_messages_content,\n", + " model_kwargs,\n", + " ], # Function to get all messages content in multi-chat scenarios\n", + " ]\n", + "\n", + " # Loop through each function and its corresponding arguments\n", + " for each_func in func_list:\n", + " # Check the function output using the specified arguments\n", + " result = check_openai_additional_utils(each_func[0], each_func[1])\n", + "\n", + " # Print the function and result for debugging purposes\n", + " print(f\"Function: {each_func[0].__name__}, Model Args: {each_func[1]}\")\n", + " print(f\"Result: {result}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "mU6kFzslo6qr", + "outputId": "29e6b00e-99d3-4189-d161-3c79806fd19d" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ChatCompletionTokenLogprob(token='The', bytes=[84, 104, 101], logprob=-7.076218e-05, top_logprobs=[]), ChatCompletionTokenLogprob(token=' capital', bytes=[32, 99, 97, 112, 105, 116, 97, 108], logprob=-1.9361265e-07, top_logprobs=[]), ChatCompletionTokenLogprob(token=' of', bytes=[32, 111, 102], logprob=-0.00020163313, top_logprobs=[]), ChatCompletionTokenLogprob(token=' France', bytes=[32, 70, 114, 97, 110, 99, 101], logprob=-1.2664457e-06, top_logprobs=[]), ChatCompletionTokenLogprob(token=' is', bytes=[32, 105, 115], logprob=-6.704273e-07, top_logprobs=[]), ChatCompletionTokenLogprob(token=' Paris', bytes=[32, 80, 97, 114, 105, 115], logprob=0.0, top_logprobs=[]), ChatCompletionTokenLogprob(token='.', bytes=[46], logprob=-2.1769476e-05, top_logprobs=[])]\n", + "[ChatCompletionTokenLogprob(token='The', bytes=[84, 104, 101], logprob=-7.076218e-05, top_logprobs=[]), ChatCompletionTokenLogprob(token=' capital', bytes=[32, 99, 97, 112, 105, 116, 97, 108], logprob=-1.9361265e-07, top_logprobs=[]), ChatCompletionTokenLogprob(token=' of', bytes=[32, 111, 102], logprob=-0.00020163313, top_logprobs=[]), ChatCompletionTokenLogprob(token=' France', bytes=[32, 70, 114, 97, 110, 99, 101], logprob=-1.2664457e-06, top_logprobs=[]), ChatCompletionTokenLogprob(token=' is', bytes=[32, 105, 115], logprob=-6.704273e-07, top_logprobs=[]), ChatCompletionTokenLogprob(token=' Paris', bytes=[32, 80, 97, 114, 105, 115], logprob=0.0, top_logprobs=[]), 
ChatCompletionTokenLogprob(token='.', bytes=[46], logprob=-2.1769476e-05, top_logprobs=[])]\n", + "Function: get_probabilities, Model Args: {'model': 'gpt-3.5-turbo', 'logprobs': True, 'n': 2}\n", + "Result: GeneratorOutput(id=None, data=[[TokenLogProb(token='The', logprob=-7.076218e-05), TokenLogProb(token=' capital', logprob=-1.9361265e-07), TokenLogProb(token=' of', logprob=-0.00020163313), TokenLogProb(token=' France', logprob=-1.2664457e-06), TokenLogProb(token=' is', logprob=-6.704273e-07), TokenLogProb(token=' Paris', logprob=0.0), TokenLogProb(token='.', logprob=-2.1769476e-05)], [TokenLogProb(token='The', logprob=-7.076218e-05), TokenLogProb(token=' capital', logprob=-1.9361265e-07), TokenLogProb(token=' of', logprob=-0.00020163313), TokenLogProb(token=' France', logprob=-1.2664457e-06), TokenLogProb(token=' is', logprob=-6.704273e-07), TokenLogProb(token=' Paris', logprob=0.0), TokenLogProb(token='.', logprob=-2.1769476e-05)]], error=None, usage=CompletionUsage(completion_tokens=14, prompt_tokens=48, total_tokens=62), raw_response=[[TokenLogProb(token='The', logprob=-7.076218e-05), TokenLogProb(token=' capital', logprob=-1.9361265e-07), TokenLogProb(token=' of', logprob=-0.00020163313), TokenLogProb(token=' France', logprob=-1.2664457e-06), TokenLogProb(token=' is', logprob=-6.704273e-07), TokenLogProb(token=' Paris', logprob=0.0), TokenLogProb(token='.', logprob=-2.1769476e-05)], [TokenLogProb(token='The', logprob=-7.076218e-05), TokenLogProb(token=' capital', logprob=-1.9361265e-07), TokenLogProb(token=' of', logprob=-0.00020163313), TokenLogProb(token=' France', logprob=-1.2664457e-06), TokenLogProb(token=' is', logprob=-6.704273e-07), TokenLogProb(token=' Paris', logprob=0.0), TokenLogProb(token='.', logprob=-2.1769476e-05)]], metadata=None)\n", + "Function: get_first_message_content, Model Args: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100}\n", + "Result: GeneratorOutput(id=None, data='The capital of France is Paris.', error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=48, total_tokens=55), raw_response='The capital of France is Paris.', metadata=None)\n", + "Function: get_all_messages_content, Model Args: {'model': 'gpt-3.5-turbo', 'temperature': 0.5, 'max_tokens': 100}\n", + "Result: GeneratorOutput(id=None, data=['The capital of France is Paris.'], error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=48, total_tokens=55), raw_response=['The capital of France is Paris.'], metadata=None)\n" + ] + } + ], + "source": [ + "run_utils_functions()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "mkvEEtXLrDZm" + }, + "source": [ + "### Adalflow - model_client() - **Groq model** LLM Multichat Usage (ModelType.LLM)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": { + "id": "SFHuk3RErCvP" + }, + "outputs": [], + "source": [ + "from adalflow.components.model_client import GroqAPIClient\n", + "from adalflow.core.types import ModelType\n", + "from adalflow.utils import setup_env\n", + "from typing import List, Dict" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "id": "cN4hsbLdrS7k" + }, + "outputs": [], + "source": [ + "class ChatConversation:\n", + " def __init__(self):\n", + " \"\"\"\n", + " Initialize a new ChatConversation object.\n", + " - GroqAPIClient is used to interact with the Groq model.\n", + " - conversation_history keeps track of the conversation between the user and assistant.\n", + " - model_kwargs contains the model parameters like 
temperature and max tokens.\n", + " \"\"\"\n", + " self.groq_client = (\n", + " GroqAPIClient()\n", + " ) # Initialize GroqAPIClient for model interaction\n", + " self.conversation_history: str = (\n", + " \"\" # Initialize conversation history as an empty string\n", + " )\n", + " self.model_kwargs = {\n", + " \"model\": \"llama3-8b-8192\", # Specify the model to use\n", + " \"temperature\": 0.5, # Set the temperature for response variability\n", + " \"max_tokens\": 100, # Limit the number of tokens in the response\n", + " }\n", + "\n", + " def add_user_message(self, message: str):\n", + " \"\"\"\n", + " Add a user message to the conversation history in the required format.\n", + " The message is wrapped with tags for better processing by the assistant.\n", + " \"\"\"\n", + " self.conversation_history += (\n", + " f\" {message} \" # Append user message to history\n", + " )\n", + "\n", + " def add_assistant_message(self, message: str):\n", + " \"\"\"\n", + " Add an assistant message to the conversation history in the required format.\n", + " The message is wrapped with tags for better processing.\n", + " \"\"\"\n", + " self.conversation_history += (\n", + " f\" {message} \" # Append assistant message to history\n", + " )\n", + "\n", + " def get_response(self) -> str:\n", + " \"\"\"\n", + " Generate a response from the assistant based on the conversation history.\n", + " - Converts the conversation history and model kwargs into the format required by the Groq API.\n", + " - Calls the API to get the response.\n", + " - Parses and adds the assistant's reply to the conversation history.\n", + " \"\"\"\n", + " # Prepare the request for the Groq API, converting the inputs into the correct format\n", + " api_kwargs = self.groq_client.convert_inputs_to_api_kwargs(\n", + " input=self.conversation_history, # Use the conversation history as input\n", + " model_kwargs=self.model_kwargs, # Include model-specific parameters\n", + " model_type=ModelType.LLM, # Specify the model type (Large Language Model)\n", + " )\n", + " print(f\"api_kwargs: {api_kwargs}\") # Log the API request parameters\n", + "\n", + " # Call the Groq model API to get the response\n", + " response = self.groq_client.call(\n", + " api_kwargs=api_kwargs,\n", + " model_type=ModelType.LLM, # Specify the model type again for clarity\n", + " )\n", + " print(\"response: \", response) # Log the API response\n", + "\n", + " # Parse the response to extract the assistant's reply\n", + " response_text = self.groq_client.parse_chat_completion(response)\n", + "\n", + " # Add the assistant's message to the conversation history\n", + " self.add_assistant_message(response_text)\n", + "\n", + " # Return the assistant's response text\n", + " return response_text" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "id": "pvqsFTEsrV2M" + }, + "outputs": [], + "source": [ + "def check_chat_conversation():\n", + " \"\"\"\n", + " This function simulates a multi-turn conversation between a user and an assistant.\n", + " It demonstrates how user inputs are processed, and the assistant generates responses,\n", + " while maintaining the conversation history for each query.\n", + " \"\"\"\n", + " # Initialize the ChatConversation object\n", + " chat = ChatConversation() # This creates an instance of the ChatConversation class\n", + "\n", + " # Define a list of user questions for a multi-turn conversation\n", + " questions = [\n", + " \"What is the capital of France?\", # First user question\n", + " \"What is its population?\", # 
Second user question\n", + " \"Tell me about its famous landmarks\", # Third user question\n", + " ]\n", + "\n", + " # Loop through each question and get the assistant's response\n", + " for question in questions:\n", + " # Print the current question from the user\n", + " print(f\"\\nUser: {question}\")\n", + "\n", + " # Add the user's message to the conversation history\n", + " chat.add_user_message(question)\n", + "\n", + " # Get the assistant's response based on the conversation history\n", + " response = chat.get_response()\n", + "\n", + " # Print the assistant's response\n", + " print(f\"Assistant: {response}\")\n", + "\n", + " # After the conversation, print the full conversation history\n", + " print(\"\\nFull Conversation History:\")\n", + " print(\n", + " chat.conversation_history\n", + " ) # This will print all messages (user and assistant) in the conversation history" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "dBNWQn_arXcE", + "outputId": "743e5d80-8a6b-4b0f-cff2-af11f0df051d" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "User: What is the capital of France?\n", + "api_kwargs: {'model': 'llama3-8b-8192', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': ' What is the capital of France? '}]}\n", + "response: ChatCompletion(id='chatcmpl-c68fccb5-ed2b-4745-be81-acbac792387f', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The capital of France is Paris.', role='assistant', function_call=None, tool_calls=None))], created=1731305352, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=8, prompt_tokens=23, total_tokens=31, completion_time=0.006666667, prompt_time=0.003034232, queue_time=0.010475318, total_time=0.009700899), x_groq={'id': 'req_01jccxebfgf5qbnaea72y9atrm'})\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=8, prompt_tokens=23, total_tokens=31), raw_response='The capital of France is Paris.', metadata=None)\n", + "\n", + "User: What is its population?\n", + "api_kwargs: {'model': 'llama3-8b-8192', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': \" What is the capital of France? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=8, prompt_tokens=23, total_tokens=31), raw_response='The capital of France is Paris.', metadata=None) What is its population? \"}]}\n", + "response: ChatCompletion(id='chatcmpl-e6ff7c1e-437c-49d9-bef7-5c6834d3e169', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The population of Paris, the capital of France, is approximately 2.1 million people within its city limits. 
However, the metropolitan area of Paris, which includes the surrounding suburbs, has a population of over 12.2 million people, making it one of the most populous metropolitan areas in Europe.', role='assistant', function_call=None, tool_calls=None))], created=1731305352, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=62, prompt_tokens=85, total_tokens=147, completion_time=0.051666667, prompt_time=0.003680399, queue_time=0.009721731, total_time=0.055347066), x_groq={'id': 'req_01jccxebk7ejstbdxzerdj643q'})\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=62, prompt_tokens=85, total_tokens=147), raw_response='The population of Paris, the capital of France, is approximately 2.1 million people within its city limits. However, the metropolitan area of Paris, which includes the surrounding suburbs, has a population of over 12.2 million people, making it one of the most populous metropolitan areas in Europe.', metadata=None)\n", + "\n", + "User: Tell me about its famous landmarks\n", + "api_kwargs: {'model': 'llama3-8b-8192', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': \" What is the capital of France? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=8, prompt_tokens=23, total_tokens=31), raw_response='The capital of France is Paris.', metadata=None) What is its population? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=62, prompt_tokens=85, total_tokens=147), raw_response='The population of Paris, the capital of France, is approximately 2.1 million people within its city limits. However, the metropolitan area of Paris, which includes the surrounding suburbs, has a population of over 12.2 million people, making it one of the most populous metropolitan areas in Europe.', metadata=None) Tell me about its famous landmarks \"}]}\n", + "response: ChatCompletion(id='chatcmpl-6d202bb8-d1fc-471e-a7cd-9dd63fe4f9b8', choices=[Choice(finish_reason='length', index=0, logprobs=None, message=ChatCompletionMessage(content=\"GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=50, prompt_tokens=74, total_tokens=124), raw_response='Paris, the capital of France, is famous for its stunning architecture, art museums, and iconic landmarks. Some of the most famous landmarks in Paris include:\\n\\n* The Eiffel Tower: Built for the 1889 World\\\\'s Fair, the Eiffel Tower is an iron lattice tower that stands 324 meters tall and is\", role='assistant', function_call=None, tool_calls=None))], created=1731305352, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=100, prompt_tokens=202, total_tokens=302, completion_time=0.083333333, prompt_time=0.008920166, queue_time=0.006389374, total_time=0.092253499), x_groq={'id': 'req_01jccxebrfemjb5ag1a66d6jxc'})\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=202, total_tokens=302), raw_response=\"GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=50, prompt_tokens=74, total_tokens=124), raw_response='Paris, the capital of France, is famous for its stunning architecture, art museums, and iconic landmarks. 
Some of the most famous landmarks in Paris include:\\n\\n* The Eiffel Tower: Built for the 1889 World\\\\'s Fair, the Eiffel Tower is an iron lattice tower that stands 324 meters tall and is\", metadata=None)\n", + "\n", + "Full Conversation History:\n", + " What is the capital of France? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=8, prompt_tokens=23, total_tokens=31), raw_response='The capital of France is Paris.', metadata=None) What is its population? GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=62, prompt_tokens=85, total_tokens=147), raw_response='The population of Paris, the capital of France, is approximately 2.1 million people within its city limits. However, the metropolitan area of Paris, which includes the surrounding suburbs, has a population of over 12.2 million people, making it one of the most populous metropolitan areas in Europe.', metadata=None) Tell me about its famous landmarks GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=202, total_tokens=302), raw_response=\"GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=50, prompt_tokens=74, total_tokens=124), raw_response='Paris, the capital of France, is famous for its stunning architecture, art museums, and iconic landmarks. Some of the most famous landmarks in Paris include:\\n\\n* The Eiffel Tower: Built for the 1889 World\\\\'s Fair, the Eiffel Tower is an iron lattice tower that stands 324 meters tall and is\", metadata=None) \n" + ] + } + ], + "source": [ + "check_chat_conversation()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "EhF6taMXniS7" + }, + "source": [ + "### Adalflow - model_client() - **Groq model** LLM Multichat Usage (ModelType.LLM) - asynchronous (async())" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "id": "6pqSxmL_s11g" + }, + "outputs": [], + "source": [ + "import asyncio\n", + "from adalflow.components.model_client import GroqAPIClient\n", + "from adalflow.core.types import ModelType\n", + "from typing import List" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": { + "id": "40LRTSyOr884" + }, + "outputs": [], + "source": [ + "class ChatConversation:\n", + " def __init__(self):\n", + " # Using an asynchronous client for communication with GroqAPI\n", + " self.groq_client = GroqAPIClient() # Create an instance of GroqAPIClient\n", + " # Model configuration parameters (e.g., Llama model with 8b parameters and 8192 context length)\n", + " self.model_kwargs = {\n", + " \"model\": \"llama3-8b-8192\", # Llama model with specific size\n", + " \"temperature\": 0.5, # Degree of randomness in the model's responses\n", + " \"max_tokens\": 100, # Maximum number of tokens in the response\n", + " }\n", + "\n", + " async def get_response(self, message: str) -> str:\n", + " \"\"\"Get response from the model for a single message asynchronously\"\"\"\n", + "\n", + " # Convert the user input message to the appropriate format for the Groq API\n", + " api_kwargs = self.groq_client.convert_inputs_to_api_kwargs(\n", + " input=message, # User's input message\n", + " model_kwargs=self.model_kwargs, # Model parameters\n", + " model_type=ModelType.LLM, # Model type for large language models (LLM)\n", + " )\n", + " print(f\"api_kwargs: {api_kwargs}\") # Print the API arguments for debugging\n", + "\n", + " # Asynchronously call the Groq API with the provided API 
arguments\n", + " response = await self.groq_client.acall(\n", + " api_kwargs=api_kwargs, # Pass the API arguments\n", + " model_type=ModelType.LLM, # Specify the model type\n", + " )\n", + " print(\"response: \", response) # Print the API response for debugging\n", + "\n", + " # Parse the response to extract the assistant's reply from the API response\n", + " response_text = self.groq_client.parse_chat_completion(response)\n", + " return response_text # Return the assistant's response text" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "id": "Y-n1ksBSsC-J" + }, + "outputs": [], + "source": [ + "async def check_chat_conversations():\n", + " # Create an instance of ChatConversation\n", + " chat = ChatConversation()\n", + "\n", + " # List of unrelated questions for independent async calls\n", + " questions = [\n", + " \"What is the capital of France?\",\n", + " \"Is dog a wild animal ?\",\n", + " \"Tell me about amazon forest\",\n", + " ]\n", + "\n", + " # Run each question as an independent asynchronous task\n", + " tasks = [chat.get_response(question) for question in questions]\n", + " # Gather all the responses concurrently\n", + " responses = await asyncio.gather(*tasks)\n", + "\n", + " # Display each response alongside the question\n", + " for question, response in zip(questions, responses):\n", + " print(f\"\\nUser: {question}\")\n", + " print(f\"Assistant: {response}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "kvqOTUknsKMI", + "outputId": "df47682f-db10-4439-98fc-7cd0c8486776" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "api_kwargs: {'model': 'llama3-8b-8192', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': 'What is the capital of France?'}]}\n", + "api_kwargs: {'model': 'llama3-8b-8192', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': 'Is dog a wild animal ?'}]}\n", + "api_kwargs: {'model': 'llama3-8b-8192', 'temperature': 0.5, 'max_tokens': 100, 'messages': [{'role': 'system', 'content': 'Tell me about amazon forest'}]}\n", + "response: ChatCompletion(id='chatcmpl-d2fb086a-5d23-409e-b060-4c00578611fe', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The capital of France is Paris.', role='assistant', function_call=None, tool_calls=None))], created=1731305379, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=8, prompt_tokens=17, total_tokens=25, completion_time=0.006666667, prompt_time=0.003519913, queue_time=0.010127806000000001, total_time=0.01018658), x_groq={'id': 'req_01jccxf5szf5sas99m0xhrz2g8'})\n", + "response: ChatCompletion(id='chatcmpl-37af21d1-dd36-4ee4-a4f3-6cce914b25dd', choices=[Choice(finish_reason='length', index=0, logprobs=None, message=ChatCompletionMessage(content='The answer to this question is a bit nuanced.\\n\\nDomesticated dogs (Canis lupus familiaris) are not considered wild animals in the classical sense. They have been selectively bred by humans for thousands of years, which has led to significant changes in their behavior, physiology, and genetics. 
As a result, domesticated dogs have adapted to living alongside humans and have lost many of the characteristics that define wild animals.\\n\\nHowever, there are some feral dog populations that have descended from domesticated dogs', role='assistant', function_call=None, tool_calls=None))], created=1731305379, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=100, prompt_tokens=16, total_tokens=116, completion_time=0.083333333, prompt_time=0.005273133, queue_time=0.007805676, total_time=0.088606466), x_groq={'id': 'req_01jccxf5t0epbv6dxgj28hvjpt'})\n", + "response: ChatCompletion(id='chatcmpl-85a584e8-5647-4112-84ec-bc770f16b091', choices=[Choice(finish_reason='length', index=0, logprobs=None, message=ChatCompletionMessage(content='The Amazon rainforest, also known as Amazonia, is the largest tropical rainforest in the world, covering an area of over 5.5 million square kilometers (2.1 million square miles) across nine countries in South America, including Brazil, Peru, Colombia, Venezuela, Ecuador, Bolivia, Guyana, Suriname, and French Guiana.\\n\\nHere are some fascinating facts about the Amazon rainforest:\\n\\n1. Biodiversity hotspots: The Amazon rainforest is home to an estimated', role='assistant', function_call=None, tool_calls=None))], created=1731305379, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=100, prompt_tokens=15, total_tokens=115, completion_time=0.086005899, prompt_time=0.000504017, queue_time=0.014784051999999999, total_time=0.086509916), x_groq={'id': 'req_01jccxf5ste18rkg69qqmfrjnk'})\n", + "\n", + "User: What is the capital of France?\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=8, prompt_tokens=17, total_tokens=25), raw_response='The capital of France is Paris.', metadata=None)\n", + "\n", + "User: Is dog a wild animal ?\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=16, total_tokens=116), raw_response='The answer to this question is a bit nuanced.\\n\\nDomesticated dogs (Canis lupus familiaris) are not considered wild animals in the classical sense. They have been selectively bred by humans for thousands of years, which has led to significant changes in their behavior, physiology, and genetics. As a result, domesticated dogs have adapted to living alongside humans and have lost many of the characteristics that define wild animals.\\n\\nHowever, there are some feral dog populations that have descended from domesticated dogs', metadata=None)\n", + "\n", + "User: Tell me about amazon forest\n", + "Assistant: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=15, total_tokens=115), raw_response='The Amazon rainforest, also known as Amazonia, is the largest tropical rainforest in the world, covering an area of over 5.5 million square kilometers (2.1 million square miles) across nine countries in South America, including Brazil, Peru, Colombia, Venezuela, Ecuador, Bolivia, Guyana, Suriname, and French Guiana.\\n\\nHere are some fascinating facts about the Amazon rainforest:\\n\\n1. 
Biodiversity hotspots: The Amazon rainforest is home to an estimated', metadata=None)\n" + ] + } + ], + "source": [ + "await check_chat_conversations()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "wHO2_4L7sOnL" + }, + "source": [ + "### Adalflow - model_client() - **Groq model** LLM Multichat Usage (ModelType.LLM) - Benchmark sync() vs async()" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": { + "id": "4yGh8iy8sON1" + }, + "outputs": [], + "source": [ + "import asyncio\n", + "import time\n", + "from adalflow.components.model_client import (\n", + " GroqAPIClient,\n", + ") # Assuming GroqAPI with .call() and .acall() is available\n", + "from adalflow.core.types import ModelType" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": { + "id": "DkMkbt7DtDQv" + }, + "outputs": [], + "source": [ + "# Initialize the Groq client\n", + "groq_client = GroqAPIClient()\n", + "\n", + "# Sample prompt for testing\n", + "prompt = \"Tell me a joke.\"\n", + "\n", + "model_kwargs = {\"model\": \"llama3-8b-8192\", \"temperature\": 0.5, \"max_tokens\": 100}" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "id": "IUEJ6KM7tDQw" + }, + "outputs": [], + "source": [ + "# Synchronous function for benchmarking .call()\n", + "def benchmark_sync_call(api_kwargs, runs=10):\n", + " # List to store responses from each synchronous call\n", + " responses = []\n", + "\n", + " # Record the start time for benchmarking\n", + " start_time = time.time()\n", + "\n", + " # Perform synchronous API calls in a loop\n", + " responses = [\n", + " groq_client.call( # Calling the API synchronously\n", + " api_kwargs=api_kwargs, # Passing the API arguments\n", + " model_type=ModelType.LLM, # Defining the model type\n", + " )\n", + " for _ in range(runs) # Repeat the call 'runs' times\n", + " ]\n", + "\n", + " # Record the end time after all calls are completed\n", + " end_time = time.time()\n", + "\n", + " # Print out the response from each synchronous call\n", + " for i, response in enumerate(responses):\n", + " print(f\"sync call {i + 1} completed: {response}\")\n", + "\n", + " # Print the total time taken for the synchronous benchmark\n", + " print(f\"\\nSynchronous benchmark completed in {end_time - start_time:.2f} seconds\")\n", + "\n", + "\n", + "# Asynchronous function for benchmarking .acall()\n", + "async def benchmark_async_acall(api_kwargs, runs=10):\n", + " # Record the start time for benchmarking\n", + " start_time = time.time()\n", + "\n", + " # Create a list of tasks for asynchronous API calls\n", + " tasks = [\n", + " groq_client.acall( # Calling the API asynchronously\n", + " api_kwargs=api_kwargs, # Passing the API arguments\n", + " model_type=ModelType.LLM, # Defining the model type\n", + " )\n", + " for _ in range(runs) # Repeat the call 'runs' times\n", + " ]\n", + "\n", + " # Await the completion of all tasks concurrently\n", + " responses = await asyncio.gather(\n", + " *tasks\n", + " ) # Gather all the responses from asynchronous calls\n", + "\n", + " # Record the end time after all asynchronous calls are completed\n", + " end_time = time.time()\n", + "\n", + " # Print out the response from each asynchronous call\n", + " for i, response in enumerate(responses):\n", + " print(f\"Async call {i + 1} completed: {response}\")\n", + "\n", + " # Print the total time taken for the asynchronous benchmark\n", + " print(f\"\\nAsynchronous benchmark completed in {end_time - start_time:.2f} seconds\")" + ] + }, + { + 
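"cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Before running the benchmark cell below, here is a quick sketch (not executed in this notebook) of timing a single synchronous call, assuming the `groq_client`, `prompt`, and `model_kwargs` defined in the cells above:\n",
+    "\n",
+    "```python\n",
+    "import time\n",
+    "\n",
+    "api_kwargs = groq_client.convert_inputs_to_api_kwargs(\n",
+    "    input=prompt, model_kwargs=model_kwargs, model_type=ModelType.LLM\n",
+    ")\n",
+    "start = time.time()\n",
+    "response = groq_client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM)\n",
+    "print(groq_client.parse_chat_completion(response).raw_response)\n",
+    "print(f\"single sync call took {time.time() - start:.2f} seconds\")\n",
+    "```\n",
+    "\n",
+    "The synchronous benchmark pays each network round-trip back to back, while the asynchronous benchmark overlaps them with `asyncio.gather`, so its wall-clock time stays close to that of the slowest single request."
+   ]
+  },
+  {
+ 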
"cell_type": "code", + "execution_count": 42, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "idOjFAo8tDQw", + "outputId": "cb790957-8960-4e58-a7de-39dfd0dd3504" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting synchronous benchmark...\n", + "\n", + "sync call 1 completed: ChatCompletion(id='chatcmpl-a6bc4231-b712-4014-a87d-0e9368f5d8f4', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.000141559, queue_time=0.01454033, total_time=0.015141559), x_groq={'id': 'req_01jccxfkx7epcsynkkex05e6v6'})\n", + "sync call 2 completed: ChatCompletion(id='chatcmpl-00586f1c-f6fb-4650-a549-ff24d462c6bf', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.000141569, queue_time=0.013657111000000001, total_time=0.015141569), x_groq={'id': 'req_01jccxfm15fs0vyr85remr47wm'})\n", + "sync call 3 completed: ChatCompletion(id='chatcmpl-a5fe8868-ca01-445e-89ba-d5791da524fa', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.000138579, queue_time=0.014364931000000001, total_time=0.015138579), x_groq={'id': 'req_01jccxfm4ye4z89hff0f8d0yas'})\n", + "sync call 4 completed: ChatCompletion(id='chatcmpl-7ae04f5f-79c0-49b4-9f08-decc05393809', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.002182427, queue_time=0.011133002, total_time=0.017182427), x_groq={'id': 'req_01jccxfm8wf4pacws56qqkbcrg'})\n", + "sync call 5 completed: ChatCompletion(id='chatcmpl-4023328d-0e1b-4127-b124-06b1d2ec4c86', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, 
prompt_time=0.000136529, queue_time=0.013371651, total_time=0.015136529), x_groq={'id': 'req_01jccxfmcpfs0twwzvvvrf8g5s'})\n", + "sync call 6 completed: ChatCompletion(id='chatcmpl-9713209a-bbad-491b-8eec-7f9ba3faf0c0', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.002243946, queue_time=0.011401844, total_time=0.017243946), x_groq={'id': 'req_01jccxfmgcf85vtdzmt7mwfk8x'})\n", + "sync call 7 completed: ChatCompletion(id='chatcmpl-1bf326d8-68f8-4117-801e-4146d0085114', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Here's one:\\n\\nWhy couldn't the bicycle stand up by itself?\\n\\n(Wait for it...)\\n\\nBecause it was two-tired!\\n\\nHope that made you laugh!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=33, prompt_tokens=15, total_tokens=48, completion_time=0.0275, prompt_time=0.002932829, queue_time=0.011706590000000001, total_time=0.030432829), x_groq={'id': 'req_01jccxfmm7e4ztjp9fn0kkbjx0'})\n", + "sync call 8 completed: ChatCompletion(id='chatcmpl-1d5ecb3b-c923-4c36-a89b-ad086ee677e6', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305394, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.002190067, queue_time=0.011227092999999999, total_time=0.017190067), x_groq={'id': 'req_01jccxfmrgfdpbjbb07248341m'})\n", + "sync call 9 completed: ChatCompletion(id='chatcmpl-d5f1ff90-9100-472b-aad0-2e18e67a1871', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.000136839, queue_time=0.014356821, total_time=0.015136839), x_groq={'id': 'req_01jccxfmw9f4p9qvktvtp0g557'})\n", + "sync call 10 completed: ChatCompletion(id='chatcmpl-c19f72d7-a2c4-48e3-848f-bef6a514a842', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.002243396, queue_time=0.011192144, total_time=0.017243396), x_groq={'id': 'req_01jccxfn06f85td634z5vyhzrt'})\n", + 
"\n", + "Synchronous benchmark completed in 1.42 seconds\n", + "\n", + "Starting asynchronous benchmark...\n", + "\n", + "Async call 1 completed: ChatCompletion(id='chatcmpl-06c89067-a76f-484a-87ba-159f6b36564a', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.002921139, queue_time=0.015738821, total_time=0.017921139), x_groq={'id': 'req_01jccxfn9cejvbpr29s0k0nkhr'})\n", + "Async call 2 completed: ChatCompletion(id='chatcmpl-2a5e8ccf-8058-4a77-a60a-5f7b86c71fb9', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.004858751, queue_time=0.010167037, total_time=0.019858751), x_groq={'id': 'req_01jccxfn9dfrxvvexkv623ezng'})\n", + "Async call 3 completed: ChatCompletion(id='chatcmpl-54d3f2e8-5603-4d2f-8396-b72a2716da2a', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.002244876, queue_time=0.012254712000000001, total_time=0.017244876), x_groq={'id': 'req_01jccxfn8ye8tasgfq5hzjrzyd'})\n", + "Async call 4 completed: ChatCompletion(id='chatcmpl-a4fd586f-1ec8-423b-af69-b0300b940d11', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.002188018, queue_time=0.01165656, total_time=0.017188018), x_groq={'id': 'req_01jccxfn96e0b9swhyd96cs7mg'})\n", + "Async call 5 completed: ChatCompletion(id='chatcmpl-4d63c669-7242-4f31-be2d-b31eb0870245', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Here's one:\\n\\nWhy couldn't the bicycle stand up by itself?\\n\\n(wait for it...)\\n\\nBecause it was two-tired!\\n\\nHope that made you laugh!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=32, prompt_tokens=15, total_tokens=47, completion_time=0.026666667, prompt_time=0.002829583, queue_time=0.011314187, total_time=0.02949625), x_groq={'id': 'req_01jccxfn9ee4zrdjw0n9jktjkt'})\n", + "Async 
call 6 completed: ChatCompletion(id='chatcmpl-5c30e90c-135b-49dc-8f8e-966fdb391dc7', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_179b0f92c9', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.000155179, queue_time=0.014245601, total_time=0.015155179), x_groq={'id': 'req_01jccxfna4f5vv7b66gyk9zwam'})\n", + "Async call 7 completed: ChatCompletion(id='chatcmpl-eda5d2dc-82e3-40ca-a544-c770726bc8d0', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.004997677, queue_time=0.008474321, total_time=0.019997677), x_groq={'id': 'req_01jccxfn9ff8ar78qnbtnqryec'})\n", + "Async call 8 completed: ChatCompletion(id='chatcmpl-1132c5ca-1ba2-49ae-94ee-359c3049d4d1', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.014361024, queue_time=0.0029951239999999983, total_time=0.029361024), x_groq={'id': 'req_01jccxfndfe1b8hre70xfj9cde'})\n", + "Async call 9 completed: ChatCompletion(id='chatcmpl-44ea61fb-a1a8-4b70-a5b0-96d793041a48', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Why couldn't the bicycle stand up by itself?\\n\\nBecause it was two-tired!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_a97cfe35ae', usage=CompletionUsage(completion_tokens=18, prompt_tokens=15, total_tokens=33, completion_time=0.015, prompt_time=0.004858171, queue_time=0.010396207000000001, total_time=0.019858171), x_groq={'id': 'req_01jccxfn9gfh49k150pw1gsysz'})\n", + "Async call 10 completed: ChatCompletion(id='chatcmpl-2ac98624-8d3f-41f8-abef-5f8b5aebf7ab', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Here's one:\\n\\nWhy couldn't the bicycle stand up by itself?\\n\\n(Wait for it...)\\n\\nBecause it was two-tired!\\n\\nHope that made you laugh!\", role='assistant', function_call=None, tool_calls=None))], created=1731305395, model='llama3-8b-8192', object='chat.completion', system_fingerprint='fp_6a6771ae9c', usage=CompletionUsage(completion_tokens=33, prompt_tokens=15, total_tokens=48, completion_time=0.0275, prompt_time=0.002554123, queue_time=0.010962996, total_time=0.030054123), x_groq={'id': 'req_01jccxfn9de0b97f7wj7kvsznw'})\n", + "\n", + "Asynchronous benchmark completed in 0.44 seconds\n" + ] + } + ], + "source": [ + "api_kwargs = 
groq_client.convert_inputs_to_api_kwargs(\n",
+    "    input=prompt, model_kwargs=model_kwargs, model_type=ModelType.LLM\n",
+    ")\n",
+    "\n",
+    "# Run both benchmarks\n",
+    "print(\"Starting synchronous benchmark...\\n\")\n",
+    "benchmark_sync_call(api_kwargs)\n",
+    "\n",
+    "print(\"\\nStarting asynchronous benchmark...\\n\")\n",
+    "await benchmark_async_acall(api_kwargs)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "bcfOfW5wteYr"
+   },
+   "source": [
+    "### Adalflow - model_client() - **Custom Model** client building (ModelType.LLM) and (ModelType.EMBEDDER) - Synchronous\n",
+    "Note: I am using the OpenAI API as an example of how to build a custom model client in adalflow. Even though it is already available in the adalflow repo, the code below is a good starting point for whoever wants to build a custom model client"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 43,
+   "metadata": {
+    "id": "kOeBbL31tmLz"
+   },
+   "outputs": [],
+   "source": [
+    "# Building a simple custom third-party model client and using it\n",
+    "# I have modified convert_inputs_to_api_kwargs() to make sure it follows the OpenAI prompt format, and I have used the appropriate\n",
+    "# OpenAI API call in call()\n",
+    "\n",
+    "import openai\n",
+    "from adalflow.core.model_client import ModelClient\n",
+    "from adalflow.core.types import ModelType, GeneratorOutput, EmbedderOutput\n",
+    "from openai.types import (\n",
+    "    CreateEmbeddingResponse,\n",
+    ")\n",
+    "from adalflow.components.model_client.utils import parse_embedding_response"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 45,
+   "metadata": {
+    "id": "7GhkATzXuBdQ"
+   },
+   "outputs": [],
+   "source": [
+    "class SimpleCustomModelClient(ModelClient):\n",
+    "    # Initialize the custom model client\n",
+    "    def __init__(self):\n",
+    "        # Call the parent class's initializer\n",
+    "        super().__init__()\n",
+    "        pass  # Placeholder for any initialization logic if needed in the future\n",
+    "\n",
+    "    # Method to convert input into API parameters for different model types (LLM or Embedder)\n",
+    "    def convert_inputs_to_api_kwargs(\n",
+    "        self, input=None, model_kwargs={}, model_type=ModelType.UNDEFINED\n",
+    "    ):\n",
+    "        \"\"\"\n",
+    "        Convert the inputs into API arguments based on the model type.\n",
+    "\n",
+    "        Args:\n",
+    "            input (str): The input text to be processed.\n",
+    "            model_kwargs (dict): Additional model parameters like temperature, max_tokens, etc.\n",
+    "            model_type (ModelType): The type of model to use (LLM or Embedder).\n",
+    "\n",
+    "        Returns:\n",
+    "            dict: API arguments formatted for the specified model type.\n",
+    "        \"\"\"\n",
+    "        if (\n",
+    "            model_type == ModelType.LLM\n",
+    "        ):  # If the model type is a large language model (LLM)\n",
+    "            return {\n",
+    "                \"model\": model_kwargs[\n",
+    "                    \"model\"\n",
+    "                ],  # Set the model to use (e.g., GPT-3, GPT-4)\n",
+    "                \"messages\": input,  # Provide the input as the message\n",
+    "                \"temperature\": model_kwargs[\n",
+    "                    \"temperature\"\n",
+    "                ],  # Set the temperature (creativity of the response)\n",
+    "                \"max_tokens\": model_kwargs[\n",
+    "                    \"max_tokens\"\n",
+    "                ],  # Max tokens to generate in the response\n",
+    "            }\n",
+    "        elif model_type == ModelType.EMBEDDER:  # If the model type is an embedder\n",
+    "            return {\n",
+    "                \"model\": model_kwargs[\"model\"],  # Model name for embedding\n",
+    "                \"input\": [input],  # Provide the input in a list format for embedding\n",
+    "            }\n",
+    "        else:\n",
+    "            # Raise an error if the model type is unsupported\n",
+    "            raise ValueError(f\"model_type {model_type} is not supported\")\n",
+ 
"\n", + " # Method to make the actual API call to OpenAI for either completions (LLM) or embeddings\n", + " def call(self, api_kwargs={}, model_type=ModelType.UNDEFINED):\n", + " \"\"\"\n", + " Call the appropriate OpenAI API method based on the model type (LLM or Embedder).\n", + "\n", + " Args:\n", + " api_kwargs (dict): Arguments to be passed to the API call.\n", + " model_type (ModelType): The type of model (LLM or Embedder).\n", + "\n", + " Returns:\n", + " Response: The API response from OpenAI.\n", + " \"\"\"\n", + " if model_type == ModelType.LLM: # If the model type is LLM (e.g., GPT-3, GPT-4)\n", + " return openai.chat.completions.create(\n", + " **api_kwargs\n", + " ) # Call the chat API for completion\n", + " elif model_type == ModelType.EMBEDDER: # If the model type is Embedder\n", + " return openai.embeddings.create(**api_kwargs) # Call the embedding API\n", + " else:\n", + " # Raise an error if an invalid model type is passed\n", + " raise ValueError(f\"Unsupported model type: {model_type}\")\n", + "\n", + " # Method to parse the response from a chat completion API call\n", + " def parse_chat_completion(self, completion):\n", + " \"\"\"\n", + " Parse the response from a chat completion API call into a custom output format.\n", + "\n", + " Args:\n", + " completion: The completion response from the OpenAI API.\n", + "\n", + " Returns:\n", + " GeneratorOutput: A custom data structure containing the parsed response.\n", + " \"\"\"\n", + " # Note: GeneratorOutput is a adalflow dataclass that contains the parsed completion data\n", + " return GeneratorOutput(\n", + " data=completion, # Store the raw completion data\n", + " error=None, # No error in this case\n", + " raw_response=str(completion), # Store the raw response as a string\n", + " )\n", + "\n", + " # Method to parse the response from an embedding API call\n", + " def parse_embedding_response(\n", + " self, response: CreateEmbeddingResponse\n", + " ) -> EmbedderOutput:\n", + " \"\"\"\n", + " Parse the response from an embedding API call into a custom output format.\n", + "\n", + " Args:\n", + " response (CreateEmbeddingResponse): The response from the embedding API.\n", + "\n", + " Returns:\n", + " EmbedderOutput: A custom data structure containing the parsed embedding response.\n", + " \"\"\"\n", + " try:\n", + " # Attempt to parse the embedding response using a helper function\n", + " return parse_embedding_response(response)\n", + " except Exception as e:\n", + " # If parsing fails, return an error message with the raw response\n", + " return EmbedderOutput(data=[], error=str(e), raw_response=response)" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "id": "W0p7jVaeuE66" + }, + "outputs": [], + "source": [ + "def build_custom_model_client():\n", + " # Instantiate the custom model client (SimpleCustomModelClient)\n", + " custom_client = SimpleCustomModelClient()\n", + "\n", + " # Define the query for the model to process\n", + " query = \"What is the capital of France?\"\n", + "\n", + " # Set the model type for a Large Language Model (LLM)\n", + " model_type = ModelType.LLM\n", + "\n", + " # Prepare the message prompt as expected by the OpenAI chat API.\n", + " # This format is suitable for GPT-like models (e.g., gpt-3.5-turbo).\n", + " message_prompt = [\n", + " {\n", + " \"role\": \"user\", # Define the user role in the conversation\n", + " \"content\": [\n", + " {\n", + " \"type\": \"text\", # Specify that the input is a text type\n", + " \"text\": query, # The actual query to be 
processed by the model\n", + " }\n", + " ],\n", + " }\n", + " ]\n", + "\n", + " # Print message indicating the usage of the LLM model type\n", + " print(\"ModelType LLM\")\n", + "\n", + " # Define additional model parameters like model name, temperature, and max tokens for LLM\n", + " model_kwargs = {\"model\": \"gpt-3.5-turbo\", \"temperature\": 0.5, \"max_tokens\": 100}\n", + "\n", + " # Convert the input message and model kwargs into the required API parameters\n", + " api_kwargs = custom_client.convert_inputs_to_api_kwargs(\n", + " input=message_prompt, model_kwargs=model_kwargs, model_type=model_type\n", + " )\n", + "\n", + " # Print the API arguments that will be passed to the call method\n", + " print(f\"api_kwargs: {api_kwargs}\")\n", + "\n", + " # Call the LLM model using the prepared API arguments\n", + " result = custom_client.call(api_kwargs, ModelType.LLM)\n", + "\n", + " # Print the result of the LLM model call (response from OpenAI)\n", + " print(result)\n", + "\n", + " # Parse the chat completion response and output a more structured result\n", + " response_text = custom_client.parse_chat_completion(result)\n", + "\n", + " # Print the structured response from the chat completion\n", + " print(f\"response_text: {response_text}\")\n", + "\n", + " # Switch to using the Embedder model type\n", + " print(\"ModelType EMBEDDER\")\n", + "\n", + " # Define model-specific parameters for the embedding model\n", + " model_kwargs = {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 8,\n", + " \"encoding_format\": \"float\",\n", + " }\n", + "\n", + " # Convert the input query for the embedder model\n", + " api_kwargs = custom_client.convert_inputs_to_api_kwargs(\n", + " input=query, model_kwargs=model_kwargs, model_type=ModelType.EMBEDDER\n", + " )\n", + "\n", + " # Print the API arguments that will be passed to the embedder model\n", + " print(f\"embedder api_kwargs: {api_kwargs}\")\n", + "\n", + " # Call the Embedder model using the prepared API arguments\n", + " result = custom_client.call(api_kwargs, ModelType.EMBEDDER)\n", + "\n", + " # Print the result of the Embedder model call (embedding response)\n", + " print(result)\n", + "\n", + " # Parse the embedding response and output a more structured result\n", + " response_text = custom_client.parse_embedding_response(result)\n", + "\n", + " # Print the structured response from the embedding model\n", + " print(f\"response_text: {response_text}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "RkVCvbWruKs4", + "outputId": "ffa02fa3-7570-4bf1-9880-0288d358f815" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ModelType LLM\n", + "api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'user', 'content': [{'type': 'text', 'text': 'What is the capital of France?'}]}], 'temperature': 0.5, 'max_tokens': 100}\n", + "ChatCompletion(id='chatcmpl-ASHw0PEDqdMlIAIZwr8w2t4L3C9u2', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The capital of France is Paris.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305488, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=14, total_tokens=21, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, 
accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\n", + "response_text: GeneratorOutput(id=None, data=ChatCompletion(id='chatcmpl-ASHw0PEDqdMlIAIZwr8w2t4L3C9u2', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The capital of France is Paris.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305488, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=14, total_tokens=21, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0))), error=None, usage=None, raw_response=\"ChatCompletion(id='chatcmpl-ASHw0PEDqdMlIAIZwr8w2t4L3C9u2', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='The capital of France is Paris.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1731305488, model='gpt-3.5-turbo-0125', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=14, total_tokens=21, completion_tokens_details=CompletionTokensDetails(audio_tokens=0, reasoning_tokens=0, accepted_prediction_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))\", metadata=None)\n", + "ModelType EMBEDDER\n", + "embedder api_kwargs: {'model': 'text-embedding-3-small', 'input': ['What is the capital of France?']}\n", + "CreateEmbeddingResponse(data=[Embedding(embedding=[0.04169800877571106, 0.0158005952835083, 0.028160491958260536, 0.024351144209504128, -0.023142803460359573, -0.002739247865974903, -0.014223608188331127, 0.01433624979108572, 0.010834109038114548, -0.010199218057096004, 0.006942841224372387, -0.024043940007686615, -0.06164587661623955, -0.01508378330618143, -0.014233848080039024, 0.023163283243775368, -0.006625395733863115, 0.019446099177002907, 0.07241854071617126, -0.024392105638980865, 0.003002932295203209, -0.010091695934534073, -0.04100167378783226, 0.011970768682658672, 0.06209644302725792, 0.0070964437909424305, -0.04554831609129906, -0.007347328122705221, 0.00364038348197937, 0.03942468762397766, 0.04214857518672943, -0.0251498781144619, -0.0019558740314096212, 0.04309067130088806, -0.024535467848181725, -0.03995717689394951, -0.03764289617538452, -0.039342764765024185, 0.021320052444934845, 0.029676036909222603, -0.003136054612696171, -0.01302550733089447, 0.00684555945917964, 0.013230310752987862, -0.027320796623826027, -0.030679574236273766, -0.009221280924975872, -0.039936695247888565, -0.03360826522111893, 0.02748463861644268, 0.03883075714111328, 0.004044870380312204, 0.03252280876040459, 0.03262520954012871, -0.016814373433589935, 0.004218953661620617, 0.024678830057382584, 0.009641128592193127, 0.04665425419807434, 0.015544591471552849, 0.036127351224422455, -0.010265778750181198, 0.026358218863606453, 0.0043085552752017975, 0.0005580897559411824, 0.0354514978826046, -0.0039322287775576115, 0.03788866102695465, 0.05906534940004349, 0.04612176492810249, -0.011059393174946308, 0.016312604770064354, -0.00918543990701437, 0.004631120711565018, -0.006594675127416849, -0.018145596608519554, -0.003968069329857826, 
-0.0059649040922522545, -0.03207223862409592, -0.031867437064647675, -0.036168310791254044, 0.0010604985291138291, -0.01807391457259655, -0.008606869727373123, 0.008248464204370975, -0.044647179543972015, 0.026767827570438385, 0.03383354842662811, -0.022917520254850388, -0.04767827317118645, 0.0033997392747551203, 0.011141314171254635, -0.025928132236003876, 0.027115993201732635, -0.010388661175966263, 0.01921057514846325, 0.03549245744943619, 0.0011750605190172791, -0.06819958984851837, 0.000605450535658747, 0.019323216751217842, -0.023982498794794083, -0.031109662726521492, 0.026972630992531776, 0.02560044638812542, 0.040182460099458694, 0.015862036496400833, -0.004974166862666607, 0.003153975121676922, -0.03852355107665062, -0.025661887601017952, 0.011212995275855064, 0.0033536585979163647, 0.02431018464267254, -0.04812883958220482, -0.029102588072419167, -0.023859616369009018, -0.02416682057082653, 0.02902066521346569, -0.02574380859732628, 0.033157698810100555, 0.052511636167764664, -0.04718674346804619, 0.010337459854781628, 0.010752187110483646, -0.013424874283373356, -0.0027725284453481436, -0.002777648391202092, 0.03491900861263275, -0.03870787471532822, 0.01074194721877575, -0.02752560004591942, 0.024535467848181725, 0.033055298030376434, 0.031232545152306557, 0.01897505111992359, 0.026952149346470833, -0.016937255859375, -0.018544962629675865, 0.010782907716929913, 0.007931018248200417, 0.013189350254833698, 0.021668218076229095, 0.003315257839858532, -0.02668590471148491, -0.01458201464265585, -0.04143176227807999, 0.040530625730752945, 0.01154068112373352, -0.042312417179346085, 0.040428224951028824, -0.02312232367694378, -0.0038989479653537273, 0.01604636013507843, -0.0056525785475969315, -0.036721281707286835, -0.008970396593213081, 0.019824985414743423, 0.0059649040922522545, 0.04341835901141167, -0.03878979757428169, 0.04927574098110199, -0.03719232976436615, -0.006026345305144787, 0.012257494032382965, 0.03287097439169884, -0.03643455356359482, -0.02140197344124317, 0.00695820152759552, -0.005381213966757059, -0.02461738884449005, 0.004137032199651003, 0.054354868829250336, 0.021156208589673042, 0.03006516396999359, -0.024392105638980865, -0.04943958297371864, 0.0406944714486599, 0.0003852867230307311, -0.01936417818069458, -0.028344813734292984, -0.02803760953247547, 0.011735244654119015, 0.013045987114310265, 0.061277229338884354, 0.029532674700021744, -0.011284676380455494, -0.025477563962340355, -0.014428411610424519, 0.012564699165523052, 0.03582014515995979, -0.02020387165248394, 0.06160491332411766, -0.008207502774894238, -0.043950848281383514, 0.0198147464543581, 0.03352634608745575, 0.01265686098486185, 0.012267733924090862, -0.007997579872608185, -0.020490597933530807, 0.02193446271121502, -0.00551945623010397, 0.014377210289239883, -0.02158629707992077, 0.030536212027072906, -0.011591882444918156, -0.013496555387973785, -0.01398808415979147, -0.010286259464919567, 0.0009939373703673482, -0.003008052473887801, -0.02521131932735443, 0.00474120257422328, 0.0012096210848540068, 0.025026995688676834, -9.424164454685524e-05, 0.01112083438783884, 0.004208713304251432, 0.024494506418704987, 0.022815117612481117, 0.015216905623674393, 0.003947588615119457, -0.01148948073387146, -0.05591137334704399, 0.047473467886447906, 0.06185067817568779, 0.011110593564808369, 0.007116924040019512, -0.0036890243645757437, 0.021012846380472183, -0.03192887827754021, 0.0009395363740622997, -0.011223236098885536, -0.03283001109957695, 0.017705269157886505, 0.014141686260700226, 
0.02832433395087719, -0.03524669632315636, 0.022815117612481117, -0.010803388431668282, 0.021135728806257248, 0.02863154001533985, -0.006625395733863115, -0.012298454530537128, -0.005204570945352316, 0.027464158833026886, 0.036270711570978165, 0.005877862684428692, 0.04337739571928978, 0.057426922023296356, -0.0076238131150603294, -0.0018624324584379792, -0.005703779403120279, -0.019743064418435097, 0.059556879103183746, -0.024494506418704987, -0.02818097174167633, -0.0359635055065155, -0.018145596608519554, 0.006650995928794146, 0.004362315870821476, -0.002106916857883334, -0.014326009899377823, 0.020869484171271324, 0.00018768326845020056, -0.01986594684422016, -0.024678830057382584, -0.014684416353702545, -0.008709271438419819, 0.009738409891724586, -0.003530301619321108, -0.0166812501847744, 0.009892012923955917, -0.019005771726369858, 0.015872277319431305, -0.01856544427573681, -0.00817166268825531, -0.021258611232042313, 0.0370284840464592, -0.0268907081335783, 0.04481102153658867, -0.012892385013401508, 0.0419028103351593, -0.051774341613054276, 0.0009952173568308353, 0.04423757269978523, 0.021258611232042313, -0.012605659663677216, 0.03065909445285797, 0.021033326163887978, 0.01985570602118969, -0.019435858353972435, -0.002831409452483058, -0.0029978123493492603, -0.04427853226661682, -0.003950148820877075, 0.0011648202780634165, 0.026870228350162506, -0.001858592382632196, -0.022753676399588585, 0.022466951981186867, -0.005186650436371565, 0.010035375133156776, -0.04517966881394386, 0.06574194878339767, -0.0051431297324597836, 0.047063861042261124, 0.05214298889040947, 0.00638987123966217, -0.039240363985300064, -0.03143734857439995, 0.024637870490550995, -0.03422267735004425, -0.010224818252027035, 0.045589275658130646, -0.013240550644695759, -0.0004217673558741808, 0.029635077342391014, -0.00687115965411067, 0.025129398331046104, 0.00804365985095501, 0.02451498806476593, -0.008376466110348701, -0.0023782814387232065, 0.01683485321700573, 0.012370135635137558, 0.02650158293545246, -0.03506237268447876, -0.02381865493953228, -0.0005033687921240926, 0.011407558806240559, 0.004651600960642099, -0.00990737322717905, -0.026112455874681473, -0.02099236659705639, 0.004933205898851156, 0.03901508077979088, 0.0013401834294199944, -0.014151927083730698, -0.0333625003695488, 0.04640848934650421, 0.009205920621752739, 0.03094581887125969, 0.003264056984335184, -0.026071494445204735, 0.018852168694138527, 0.02465835027396679, 0.012237013317644596, 0.0034663004335016012, -0.027402717620134354, -0.007209085859358311, -0.009190560318529606, -0.008176782168447971, -0.027771364897489548, 0.002693166956305504, 0.0702066645026207, -0.022405510768294334, -0.06353006511926651, 0.03995717689394951, -0.04046918451786041, -0.0492347776889801, -0.025784770026803017, -0.04837460443377495, -0.03381307050585747, -0.0039271083660423756, 0.013353193178772926, 0.004339275881648064, -0.020275553688406944, 0.06266989558935165, -0.03268665075302124, 0.0050637684762477875, -0.004106311593204737, -0.02090020477771759, 0.0425991415977478, -0.030085645616054535, 0.04235338047146797, 0.02119717001914978, 0.013793520629405975, -0.01633308455348015, -0.028590578585863113, -0.01782815158367157, 0.015472909435629845, -0.026112455874681473, 0.06140011176466942, 0.014418171718716621, 0.0824129581451416, -0.04210761561989784, -0.009810090996325016, 0.03045429103076458, -0.005196890793740749, 0.010414261370897293, 0.03174455463886261, 0.03784770146012306, -0.07372928410768509, 0.00563209829851985, 0.01000465452671051, 
0.018135355785489082, -0.007413889281451702, -0.038892198354005814, 0.021750139072537422, -0.0187702476978302, -0.03147830814123154, -0.049644384533166885, 0.07581828534603119, 0.02055203914642334, 0.026010053232312202, 0.0049127256497740746, 0.014817538671195507, -0.03244088590145111, -0.004838484339416027, -0.06045801565051079, 0.008407186716794968, -0.011806925758719444, 0.002859569853171706, 0.05161049962043762, 0.06066281720995903, -0.06926456838846207, 0.026276297867298126, -0.015677712857723236, -0.003386939177289605, -0.0044570378959178925, -0.046531371772289276, 0.00856590922921896, -0.022303108125925064, -0.008227983489632607, -0.015493390150368214, -0.04690001904964447, 0.003983429633080959, 0.02867249958217144, -0.0010368181392550468, -0.0045363991521298885, 0.0017062698025256395, 0.0016051479615271091, 0.0011558601399883628, -0.007229566108435392, 0.006482033059000969, 0.04550735279917717, -0.03199031949043274, 0.023347606882452965, 0.016957735642790794, 0.0008672151016071439, 0.002657326404005289, -0.013865201734006405, -0.03676224127411842, -0.018729286268353462, 0.03743809461593628, 0.013066467829048634, -0.04616272449493408, 0.046777136623859406, -0.022446472197771072, 0.007966859266161919, -0.02134053222835064, -0.01714205928146839, -0.007772295735776424, 0.03743809461593628, 0.026071494445204735, 0.0901135727763176, 0.008571029640734196, 0.0002102436701534316, 0.003084853757172823, 0.059597838670015335, 0.013240550644695759, 0.027853285893797874, 0.034447960555553436, 0.023654812946915627, 0.026583503931760788, 0.015821075066924095, -0.04046918451786041, -0.04603984206914902, -0.005135449580848217, 0.04509774595499039, 0.010158257558941841, 0.014305529184639454, -0.027791844680905342, -0.020971884950995445, -0.058164212852716446, 0.014991621486842632, -0.05423198640346527, -0.024781232699751854, -0.03844163194298744, 0.008611990138888359, -0.0031642152462154627, -0.02584621123969555, -0.08204431086778641, 0.006246509030461311, 0.005030487664043903, 0.03838019073009491, -0.032113201916217804, 0.02203686349093914, 0.04186185076832771, -0.013783280737698078, -0.0034995810128748417, -0.005806181114166975, 0.02818097174167633, -0.008089740760624409, -0.04341835901141167, -0.01732638292014599, -0.017705269157886505, -0.05644386261701584, -0.015964439138770103, 0.015012102201581001, 0.006722677033394575, 0.009948333725333214, 0.04218953475356102, 0.05820517614483833, 0.04694097861647606, 0.026030534878373146, -0.023654812946915627, -0.010516663081943989, 0.014520573429763317, -0.04829268157482147, 0.012626140378415585, 0.020080991089344025, -0.011755725368857384, 0.008253583684563637, -0.02381865493953228, 0.012011729180812836, -0.0015705873956903815, -0.026808787137269974, 0.025047477334737778, 0.01603611931204796, -0.002360361162573099, 0.006313070189207792, 0.027607521042227745, -0.008007819764316082, -0.009784490801393986, 0.001804831437766552, 0.03153974935412407, -0.056525785475969315, 0.005724259652197361, -0.021504374220967293, -0.011581641621887684, -0.0017830710858106613, -0.009118879213929176, 0.0008339345222339034, -0.009513125754892826, 0.04927574098110199, 0.016599329188466072, 0.04358220100402832, -0.0006348910974338651, -0.003113014390692115, 0.005381213966757059, -0.014244087971746922, -0.03608638793230057, -0.01856544427573681, 0.006313070189207792, -0.05136473476886749, -0.01970210298895836, -0.03362874686717987, -0.022446472197771072, 0.0576317235827446, -0.04431949183344841, -0.0436641201376915, -0.0021849980112165213, 0.008924315683543682, 
...], index=0, object='embedding')], model='text-embedding-3-small', object='list', usage=Usage(prompt_tokens=7, total_tokens=7))\n", + "response_text: EmbedderOutput(data=[Embedding(embedding=[...], index=0)], model='text-embedding-3-small', usage=Usage(prompt_tokens=7, total_tokens=7), error=None, raw_response=None, input=None)\n" + ] + } + ], + "source": [ + "build_custom_model_client()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BLAF5qTEmoyW" + }, + "source": [ + "# Issues and feedback\n", + "\n", + "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", + "\n", + "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or 
[Discord](https://discord.gg/ezzszrRZvT)." + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} From a97e6a9870da721cc8eac66492d90439a8da2dfc Mon Sep 17 00:00:00 2001 From: ajithvcoder Date: Tue, 26 Nov 2024 05:16:04 +0000 Subject: [PATCH 14/40] add tutorial python files for sync and async model_client --- .../adalflow_modelclient_sync_and_async.py | 111 ++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 tutorials/adalflow_modelclient_sync_and_async.py diff --git a/tutorials/adalflow_modelclient_sync_and_async.py b/tutorials/adalflow_modelclient_sync_and_async.py new file mode 100644 index 00000000..555d0311 --- /dev/null +++ b/tutorials/adalflow_modelclient_sync_and_async.py @@ -0,0 +1,111 @@ +import asyncio +import time +from adalflow.components.model_client import ( + OpenAIClient, +) # Assuming OpenAIClient with .call() and .acall() is available +from adalflow.core.types import ModelType + +from getpass import getpass +import os + +from adalflow.utils import setup_env + +# Load environment variables - Make sure to have OPENAI_API_KEY in .env file and .env is present in current folder +if os.path.isfile(".env"): + setup_env(".env") + +# Prompt user to enter their API keys securely +if "OPENAI_API_KEY" not in os.environ: + openai_api_key = getpass("Please enter your OpenAI API key: ") + # Set environment variables + os.environ["OPENAI_API_KEY"] = openai_api_key + print("API keys have been set.") + + +# Synchronous function for benchmarking .call() +def benchmark_sync_call(api_kwargs, runs=10): + """ + Benchmark the synchronous .call() method by running it multiple times. + + Parameters: + - api_kwargs: The arguments to be passed to the API call + - runs: The number of times to run the call (default is 10) + """ + # List to store responses + responses = [] + + # Record the start time of the benchmark + start_time = time.time() + + # Perform synchronous API calls for the specified number of runs + responses = [ + openai_client.call( + api_kwargs=api_kwargs, # API arguments + model_type=ModelType.LLM, # Model type (e.g., LLM for language models) + ) + for _ in range(runs) # Repeat 'runs' times + ] + + # Record the end time after all calls are completed + end_time = time.time() + + # Output the results of each synchronous call + for i, response in enumerate(responses): + print(f"sync call {i + 1} completed: {response}") + + # Print the total time taken for all synchronous calls + print(f"\nSynchronous benchmark completed in {end_time - start_time:.2f} seconds") + + +# Asynchronous function for benchmarking .acall() +async def benchmark_async_acall(api_kwargs, runs=10): + """ + Benchmark the asynchronous .acall() method by running it multiple times concurrently. 
+ + Parameters: + - api_kwargs: The arguments to be passed to the API call + - runs: The number of times to run the asynchronous call (default is 10) + """ + # Record the start time of the benchmark + start_time = time.time() + + # Create a list of asynchronous tasks for the specified number of runs + tasks = [ + openai_client.acall( + api_kwargs=api_kwargs, # API arguments + model_type=ModelType.LLM, # Model type (e.g., LLM for language models) + ) + for _ in range(runs) # Repeat 'runs' times + ] + + # Execute all tasks concurrently and wait for them to finish + responses = await asyncio.gather(*tasks) + + # Record the end time after all tasks are completed + end_time = time.time() + + # Output the results of each asynchronous call + for i, response in enumerate(responses): + print(f"Async call {i + 1} completed: {response}") + + # Print the total time taken for all asynchronous calls + print(f"\nAsynchronous benchmark completed in {end_time - start_time:.2f} seconds") + + +if __name__ == "__main__": + # Initialize the OpenAI client + openai_client = OpenAIClient() + + # Sample prompt for testing + prompt = "Tell me a joke." + + model_kwargs = {"model": "gpt-3.5-turbo", "temperature": 0.5, "max_tokens": 100} + api_kwargs = openai_client.convert_inputs_to_api_kwargs( + input=prompt, model_kwargs=model_kwargs, model_type=ModelType.LLM + ) + # Run both benchmarks + print("Starting synchronous benchmark...\n") + benchmark_sync_call(api_kwargs) + + print("\nStarting asynchronous benchmark...\n") + asyncio.run(benchmark_async_acall(api_kwargs)) From 805ecdd7dcc28d1d2cbc7d49c84d6222a5683e20 Mon Sep 17 00:00:00 2001 From: ajithvcoder Date: Tue, 26 Nov 2024 05:23:34 +0000 Subject: [PATCH 15/40] docs: add Colab and GitHub links to model client tutorial --- docs/source/tutorials/model_client.rst | 14 +++++++++++++- notebooks/tutorials/adalflow_modelclient.ipynb | 12 +++++++++--- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/docs/source/tutorials/model_client.rst b/docs/source/tutorials/model_client.rst index 943cc574..997b222c 100644 --- a/docs/source/tutorials/model_client.rst +++ b/docs/source/tutorials/model_client.rst @@ -1,3 +1,15 @@ +.. raw:: html + + + .. _tutorials-model_client: ModelClient @@ -267,7 +279,7 @@ The output will be: .. TODO: add optional package introduction here - +.. TODO: update additional code relavant to notebook links .. admonition:: API reference :class: highlight diff --git a/notebooks/tutorials/adalflow_modelclient.ipynb b/notebooks/tutorials/adalflow_modelclient.ipynb index bd77a449..1674c69a 100644 --- a/notebooks/tutorials/adalflow_modelclient.ipynb +++ b/notebooks/tutorials/adalflow_modelclient.ipynb @@ -6,9 +6,15 @@ "id": "hGLYrUwBmvUD" }, "source": [ - "\n", - " \"Open\n", - "\n" + "" ] }, { From c35b5aa849c60a2f6a0775934f23a3fa23e34142 Mon Sep 17 00:00:00 2001 From: ajithvcoder Date: Tue, 26 Nov 2024 07:29:15 +0000 Subject: [PATCH 16/40] docs: add .rst file for model client --- docs/source/tutorials/model_client.rst | 1234 +++++++++++++++++++++++- 1 file changed, 1233 insertions(+), 1 deletion(-) diff --git a/docs/source/tutorials/model_client.rst b/docs/source/tutorials/model_client.rst index 997b222c..662c03b4 100644 --- a/docs/source/tutorials/model_client.rst +++ b/docs/source/tutorials/model_client.rst @@ -279,7 +279,1239 @@ The output will be: .. TODO: add optional package introduction here -.. 
TODO: update additional code relavant to notebook links + +OPENAI EMBEDDER - Embedding Processing Example +------------------------------------------------- + +In this example, we are using a collection of embeddings to demonstrate different functionalities such as calculating semantic similarity, finding nearest neighbors, and averaging embeddings. Below is the Python code used to achieve these tasks: + +.. code-block:: python + + from typing import List + import numpy as np + from adalflow.core.types import ModelType, EmbedderOutput + from adalflow.components.model_client import OpenAIClient + from dataclasses import dataclass + from enum import Enum + from numpy.linalg import norm + +Data Classes + +We use two dataclass types to structure the collection and usage data: + +EmbeddingCollection: Stores an individual embedding collection and its corresponding index. +Usage: Keeps track of token usage, such as prompt_tokens and total_tokens. + +.. code-block:: python + + @dataclass + class EmbeddingCollection: + collection: List[float] + cindex: int + + + @dataclass + class Usage: + prompt_tokens: int + total_tokens: int + +The following function, `get_openai_embedding`, sends a request to the OpenAI API to retrieve embeddings for a given text. It sets the model type to `EMBEDDER`, prepares the required model-specific parameters, and processes the response: + +.. code-block:: python + + openai_client = OpenAIClient() + + def get_openai_embedding(text): + # Set model type to EMBEDDER for embedding functionality + model_type = ModelType.EMBEDDER + + # Prepare input and model-specific parameters + input = text + model_kwargs = { + "model": "text-embedding-3-small", + "dimensions": 8, + "encoding_format": "float", + } + + # Convert inputs to the required API format + api_kwargs = openai_client.convert_inputs_to_api_kwargs( + input=input, model_kwargs=model_kwargs, model_type=model_type + ) + print(f"api_kwargs: {api_kwargs}") # Debug output to verify API arguments + + # Call OpenAI API and parse response for embeddings + response = openai_client.call(api_kwargs=api_kwargs, model_type=model_type) + reponse_embedder_output = openai_client.parse_embedding_response(response) + print( + f"reponse_embedder_output: {reponse_embedder_output}" + ) # Debug output to verify embeddings + return reponse_embedder_output + +Embedding Processing + +The function process_embeddings takes in a collection of embeddings and provides utilities for calculating similarity, averaging embeddings, and finding nearest neighbors: + +Similarity: Measures the cosine similarity between two embeddings. +Average Embedding: Computes the mean embedding across a set of embeddings. +Nearest Neighbors: Identifies the top-k nearest neighbors based on cosine similarity. + +.. 
code-block:: python + + def process_embeddings(embeddings_collection): + # Extract embedding data for each item in the collection + embeddingOutput = [emb.collection for emb in embeddings_collection] + embeddingDataList = [each_emb_out.data for each_emb_out in embeddingOutput] + embeddingList = [ + each_item.embedding + for each_emb_data in embeddingDataList + for each_item in each_emb_data + ] + + # Convert to numpy array for easier manipulation and calculations + embeddings_array = np.array(embeddingList) + + def calculate_similarity(emb1, emb2): + # Compute cosine similarity between two embeddings + return np.dot(emb1, emb2) / (norm(emb1) * norm(emb2)) + + def get_average_embedding(embeddings_list): + # Calculate the mean embedding across a list of embeddings + return np.mean(embeddings_list, axis=0) + + def find_nearest_neighbors( + query_index: int, embedding_list: List[List[float]], k: int = 5 + ): + # Find top-k most similar embeddings to a query embedding, based on cosine similarity + query_embedding = embedding_list[query_index] + similarities = [ + (i, calculate_similarity(query_embedding, emb)) + for i, emb in enumerate(embedding_list) + if i != query_index + ] + return sorted(similarities, key=lambda x: x[1], reverse=True)[:k] + + # Return dictionary of functions and processed data for further use + return { + "embeddings_array": embeddings_array, + "calculate_similarity": calculate_similarity, + "average_embedding": get_average_embedding, + "find_nearest_neighbors": find_nearest_neighbors, + } + +The function `demonstrate_embeddings_usage` showcases how to analyze semantic similarities, find nearest neighbors, and calculate average embeddings for sample texts. It selects random texts, compares their similarities, finds nearest neighbors for a specific query, and compares average embeddings for texts containing "Paris". + +.. code-block:: python + + # Demonstrate embeddings usage with sample data + def demonstrate_embeddings_usage(sample_embeddings, input_text_list): + # Initialize processor and retrieve embeddings array + processor = process_embeddings(sample_embeddings) + embeddings = processor["embeddings_array"] + + print("1. Analyzing Semantic Similarities:") + print("-" * 50) + + # Select a few random indices for similarity testing + num_indices = 5 + assert len(input_text_list) == len(embeddings) + indices = np.random.choice(len(input_text_list), num_indices, replace=False) + selected_text = np.array(input_text_list)[indices] + selected_embeddings = np.array(embeddings)[indices] + + # Display selected texts and their embeddings + print("Selected indices:", indices) + print("Selected elements from array1:", selected_text) + print("Selected elements from array2:", selected_embeddings) + + # Calculate similarity between each pair of selected texts + for i in range(len(selected_text)): + for j in range(i + 1, len(selected_text)): + similarity = processor["calculate_similarity"]( + selected_embeddings[i], selected_embeddings[j] + ) + print(f"\nComparing:\n'{selected_text[i]}' \nwith:\n'{selected_text[j]}'") + print(f"Similarity score: {similarity:.4f}") + + print("\n2. 
Finding Nearest Neighbors:") + print("-" * 50) + + # Find and display the 3 nearest neighbors for the first text + query_idx = 0 + neighbors = processor["find_nearest_neighbors"](query_idx, embeddings, k=3) + print(f"\nQuery text: '{input_text_list[query_idx]}'") + print("\nNearest neighbors:") + + for idx, similarity in neighbors: + print(f"- '{input_text_list[idx]}' (similarity: {similarity:.4f})") + + print("\n3. Using Average Embeddings:") + print("-" * 50) + + # Calculate and compare the average embedding for texts containing "Paris" + paris_indices = [i for i, text in enumerate(input_text_list) if "Paris" in text] + paris_embeddings = embeddings[paris_indices] + avg_paris_embedding = processor["average_embedding"](paris_embeddings) + + print("\nComparing average 'Paris' embedding with all texts:") + for i, text in enumerate(input_text_list): + similarity = processor["calculate_similarity"]( + avg_paris_embedding, embeddings[i] + ) + print(f"- '{text}' (similarity: {similarity:.4f})") + + +Running the Model Client + +Finally, we run the model client by initializing a set of sample texts, generating their embeddings, and using the embedding processing functions to analyze similarities and neighbors. + +.. code-block:: python + + def run_model_client_embedding_usage(): + # Define a set of sample texts to test embedding and similarity functionalities + sample_texts = [ + "What is the capital of France?", + "Paris is the capital of France.", + "What is the population of France?", + "How big is Paris?", + "What is the weather like in Paris?", + ] + + # Duplicate each sample text to form an input list with repeated entries (for embedding testing) + input_text_list = [text for text in sample_texts for _ in range(2)] + + # Generate embeddings for each text in the input list, and store them in an EmbeddingCollection + embeddings_collection = [ + EmbeddingCollection(collection=get_openai_embedding(text), cindex=i) + for i, text in enumerate(input_text_list) + ] + print( + embeddings_collection + ) # Debugging output to verify embeddings collection content + + # Demonstrate the usage of embeddings by analyzing similarities, finding neighbors, etc. + demonstrate_embeddings_usage(embeddings_collection, input_text_list) + +To execute the complete example, simply call the `run_model_client_embedding_usage()` function: + +.. code-block:: python + + run_model_client_embedding_usage() + + +This will trigger the embedding retrieval and processing functions, and you will see the results printed out, demonstrating how embeddings can be used for similarity analysis, neighbor finding, and averaging. + +OPENAI LLM Chat - Multichat Usage +------------------------------------------------- +This example demonstrates how to create a multichat system using OpenAI's LLM with adalflow, where the assistant's responses depend on the entire conversation history. This allows for a more dynamic and context-aware conversation flow. + +.. code-block:: python + + from adalflow.components.model_client import OpenAIClient + from adalflow.core.types import ModelType + from adalflow.utils import setup_env + from typing import List, Dict + +ChatConversation Class + +Here, we define a ``ChatConversation`` class to manage the conversation history and make API calls to the OpenAI model. The assistant's responses are generated based on the entire conversation history. + +.. 
code-block:: python

    class ChatConversation:
        def __init__(self):
            # Initialize the OpenAI client for managing API calls
            self.openai_client = OpenAIClient()
            # Initialize an empty conversation history to store chat messages
            self.conversation_history: str = ""
            # Model parameters to customize the API call
            self.model_kwargs = {
                "model": "gpt-3.5-turbo",
                "temperature": 0.5,  # Controls randomness; 0.5 for balanced responses
                "max_tokens": 100,  # Limits the response length
            }

        def add_user_message(self, message: str):
            """Add a user message to the conversation history"""
            self.conversation_history += (
                f"<USER> {message} </USER>"  # Format for user message
            )

        def add_assistant_message(self, message: str):
            """Add an assistant message to the conversation history"""
            self.conversation_history += (
                f"<ASSISTANT> {message} </ASSISTANT>"  # Format for assistant message
            )

        def get_response(self) -> str:
            """Get response from the model based on conversation history"""
            # Convert the conversation history and model parameters into API arguments
            api_kwargs = self.openai_client.convert_inputs_to_api_kwargs(
                input=self.conversation_history,
                model_kwargs=self.model_kwargs,
                model_type=ModelType.LLM,
            )
            print(f"api_kwargs: {api_kwargs}")  # Debugging output to verify API parameters

            # Call the API with the generated arguments to get a response
            response = self.openai_client.call(
                api_kwargs=api_kwargs, model_type=ModelType.LLM
            )
            print("response: ", response)  # Debugging output for raw API response

            # Extract and parse the text response from the API output
            response_text = self.openai_client.parse_chat_completion(response)
            # Update conversation history with the assistant's response
            self.add_assistant_message(response_text)
            return response_text  # Return the assistant's response to the caller

Simulating a Multi-turn Conversation

In the ``check_chat_conversation()`` function, we simulate a multi-turn conversation by iterating over a list of user questions. Each question is added to the conversation history, and the assistant responds based on the accumulated conversation context.

.. code-block:: python

    def check_chat_conversation():
        # Initialize a new chat conversation
        chat = ChatConversation()

        # Example list of user questions to simulate a multi-turn conversation
        questions = [
            "What is the capital of France?",
            "What is its population?",
            "Tell me about its famous landmarks",
        ]

        # Iterate through each question in the list
        for question in questions:
            print(f"\nUser: {question}")  # Display the user's question
            chat.add_user_message(
                question
            )  # Add the user question to the conversation history

            response = (
                chat.get_response()
            )  # Get assistant's response based on conversation history
            print(f"Assistant: {response}")  # Display the assistant's response

        # Display the full conversation history after all exchanges
        print("\nFull Conversation History:")
        print(chat.conversation_history)  # Print the accumulated conversation history

Key Points

Each question builds on the previous turns: because the full conversation history is sent with every request, the assistant can resolve follow-ups such as "What is its population?" in context. Run ``check_chat_conversation()`` to try it.

OPENAI LLM Chat - Multichat Usage - Asynchronous
-------------------------------------------------

This example demonstrates how to create an asynchronous multichat system using OpenAI's LLM with adalflow. The asynchronous approach allows handling multiple questions in parallel, making the interaction more efficient when dealing with unrelated queries.
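Compared with the synchronous version, the key change is that the client's ``acall()`` method is awaited inside a coroutine, and the questions are treated as independent, so no shared history is kept. A minimal sketch of the call pattern (the ``ask_async`` helper is illustrative, not part of AdalFlow):

.. code-block:: python

    # Illustrative helper: one awaited request per question, using the same
    # convert_inputs_to_api_kwargs / parse_chat_completion flow shown earlier.
    async def ask_async(client: OpenAIClient, api_kwargs: dict):
        response = await client.acall(api_kwargs=api_kwargs, model_type=ModelType.LLM)
        return client.parse_chat_completion(response)

The full example below wraps this pattern in a small class and runs several questions concurrently with ``asyncio.gather``.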
+ +.. code-block:: python + + import asyncio + from adalflow.components.model_client import OpenAIClient + from adalflow.core.types import ModelType + from typing import List + +ChatConversationAsync Class + +The ``ChatConversationAsync`` class is designed to handle asynchronous API calls to the OpenAI model. It supports concurrent requests, which improves performance when interacting with multiple questions simultaneously. + +.. code-block:: python + + class ChatConversationAsync: + def __init__(self): + # Initialize with an asynchronous OpenAI client + self.openai_client = OpenAIClient() + + # Default model parameters for the chat + self.model_kwargs = { + "model": "gpt-3.5-turbo", # Model used for chat + "temperature": 0.5, # Controls randomness in response + "max_tokens": 100, # Maximum tokens in the generated response + } + + async def get_response(self, message: str) -> str: + """Asynchronously get a response from the model for a given user message""" + + # Convert input message and model parameters into the format expected by the API + api_kwargs = self.openai_client.convert_inputs_to_api_kwargs( + input=message, # User's message input + model_kwargs=self.model_kwargs, # Model-specific settings + model_type=ModelType.LLM, # Specify the model type as a language model (LLM) + ) + print(f"api_kwargs: {api_kwargs}") # Log the API arguments for debugging + + # Make an asynchronous API call to OpenAI's model + response = await self.openai_client.acall( + api_kwargs=api_kwargs, # Pass the prepared arguments + model_type=ModelType.LLM, # Specify the model type again + ) + print("response: ", response) # Print the raw response from the API + + # Parse the API response to extract the assistant's reply (chat completion) + response_text = self.openai_client.parse_chat_completion(response) + return response_text # Return the parsed response text + +Running Multiple Asynchronous Chat Sessions + +In the ``check_chat_conversations_async()`` function, we handle a list of unrelated user questions concurrently. This is done by creating a list of asynchronous tasks and gathering their responses. + +.. code-block:: python + + async def check_chat_conversations_async(): + # Create an instance of ChatConversationAsync to handle asynchronous operations + chat = ChatConversationAsync() + + # List of unrelated questions that will be handled in parallel + questions = [ + "What is the capital of France?", # Question 1 + "Is dog a wild animal?", # Question 2 + "Tell me about amazon forest", # Question 3 + ] + + # Create a list of asynchronous tasks, one for each question + # Each task calls the get_response method asynchronously for a question + tasks = [chat.get_response(question) for question in questions] + + # Gather the results of all asynchronous tasks concurrently + responses = await asyncio.gather(*tasks) + + # Print the responses from the assistant along with the respective user questions + for question, response in zip(questions, responses): + print(f"\nUser: {question}") + print(f"Assistant: {response}") + +Running the Asynchronous Function + +To execute the asynchronous function, you can use the following methods based on your environment: + +.. code-block:: python + + # Run the asynchronous function if in a file + # asyncio.run(check_chat_conversations_async()) + + # in jupyter notebook + await check_chat_conversations_async() + +This approach allows you to handle multiple independent conversations concurrently, improving the system's performance and responsiveness. 
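When many questions are fired at once, it can also be worth capping concurrency so that provider rate limits are not exceeded. A possible extension of the example above (a sketch only; the ``asyncio.Semaphore`` wrapper and the limit of 3 are illustrative choices, not part of the tutorial code):

.. code-block:: python

    async def check_chat_conversations_async_limited(max_concurrency: int = 3):
        # Same questions as above, but with at most `max_concurrency` requests in flight
        chat = ChatConversationAsync()
        semaphore = asyncio.Semaphore(max_concurrency)

        async def limited_call(question: str):
            async with semaphore:
                return await chat.get_response(question)

        questions = [
            "What is the capital of France?",
            "Is dog a wild animal?",
            "Tell me about amazon forest",
        ]

        # Gather the rate-limited tasks concurrently
        responses = await asyncio.gather(*(limited_call(q) for q in questions))
        for question, response in zip(questions, responses):
            print(f"\nUser: {question}")
            print(f"Assistant: {response}")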
+ +OPENAI LLM Chat - Multichat Usage - Benchmark sync() vs async() +--------------------------------------------------------------------- + +This section compares the performance of synchronous (``call()``) vs. asynchronous (``acall()``) API calls to OpenAI's language model, benchmarking them using a sample prompt to determine which approach is more efficient for handling multiple API requests. + +.. code-block:: python + + import asyncio + import time + from adalflow.components.model_client import ( + OpenAIClient, + ) # Assuming OpenAIClient with .call() and .acall() is available + from adalflow.core.types import ModelType + +Setup for Benchmarking + +We initialize the OpenAI client and set up a sample prompt to test both synchronous and asynchronous API calls. + +.. code-block:: python + + # Initialize the OpenAI client + openai_client = OpenAIClient() + + # Sample prompt for testing + prompt = "Tell me a joke." + + model_kwargs = {"model": "gpt-3.5-turbo", "temperature": 0.5, "max_tokens": 100} + +Synchronous Benchmarking + +The ``benchmark_sync_call`` function runs the synchronous ``.call()`` method multiple times and measures the total time taken for all requests. + +.. code-block:: python + + # Synchronous function for benchmarking .call() + def benchmark_sync_call(api_kwargs, runs=10): + """ + Benchmark the synchronous .call() method by running it multiple times. + + Parameters: + - api_kwargs: The arguments to be passed to the API call + - runs: The number of times to run the call (default is 10) + """ + # List to store responses + responses = [] + + # Record the start time of the benchmark + start_time = time.time() + + # Perform synchronous API calls for the specified number of runs + responses = [ + openai_client.call( + api_kwargs=api_kwargs, # API arguments + model_type=ModelType.LLM, # Model type (e.g., LLM for language models) + ) + for _ in range(runs) # Repeat 'runs' times + ] + + # Record the end time after all calls are completed + end_time = time.time() + + # Output the results of each synchronous call + for i, response in enumerate(responses): + print(f"sync call {i + 1} completed: {response}") + + # Print the total time taken for all synchronous calls + print(f"\nSynchronous benchmark completed in {end_time - start_time:.2f} seconds") + + + # Asynchronous function for benchmarking .acall() + async def benchmark_async_acall(api_kwargs, runs=10): + """ + Benchmark the asynchronous .acall() method by running it multiple times concurrently. + + Parameters: + - api_kwargs: The arguments to be passed to the API call + - runs: The number of times to run the asynchronous call (default is 10) + """ + # Record the start time of the benchmark + start_time = time.time() + + # Create a list of asynchronous tasks for the specified number of runs + tasks = [ + openai_client.acall( + api_kwargs=api_kwargs, # API arguments + model_type=ModelType.LLM, # Model type (e.g., LLM for language models) + ) + for _ in range(runs) # Repeat 'runs' times + ] + + # Execute all tasks concurrently and wait for them to finish + responses = await asyncio.gather(*tasks) + + # Record the end time after all tasks are completed + end_time = time.time() + + # Output the results of each asynchronous call + for i, response in enumerate(responses): + print(f"Async call {i + 1} completed: {response}") + + # Print the total time taken for all asynchronous calls + print(f"\nAsynchronous benchmark completed in {end_time - start_time:.2f} seconds") + +.. 
code-block:: python + + api_kwargs = openai_client.convert_inputs_to_api_kwargs( + input=prompt, model_kwargs=model_kwargs, model_type=ModelType.LLM + ) + + # Run both benchmarks + print("Starting synchronous benchmark...\n") + benchmark_sync_call(api_kwargs) + + # Run the asynchronous function if in a file + # asyncio.run(benchmark_async_acall(api_kwargs)) + + print("\nStarting asynchronous benchmark...\n") + await benchmark_async_acall(api_kwargs) + +OPENAI LLM Chat - Additional Utils +------------------------------------------------- + +This section demonstrates the use of additional utility functions for OpenAI's language model client. The following utility functions are included: + +- ``get_first_message_content()`` +- ``get_all_messages_content()`` +- ``get_probabilities()`` + +These utilities can be used to interact with the OpenAI model in various ways, such as extracting the first message content, retrieving all message content from a multi-chat scenario, and calculating the probabilities of tokens. + +Code Setup + +First, we import necessary components for utilizing the OpenAI client and the utilities from the ``adalflow`` library. + +.. code-block:: python + + from adalflow.components.model_client import OpenAIClient + from adalflow.core.types import ModelType + from adalflow.utils import setup_env + from adalflow.components.model_client.openai_client import ( + get_first_message_content, + get_all_messages_content, + get_probabilities, + ) + from adalflow.core import Generator + +Function: ``check_openai_additional_utils`` + +This function demonstrates how to use the OpenAI client along with a custom utility function for generating responses from the model, based on the given query and utility function. + +.. code-block:: python + + def check_openai_additional_utils(func, model_kwargs): + """ + This function demonstrates the usage of the OpenAI client and a custom utility function + for generating responses from the LLM model, based on the given query in openai client. + + Parameters: + - func: A function that will be used to parse the chat completion (for custom parsing). + - model_kwargs: The additional model parameters (e.g., temperature, max_tokens) to be used in the model. + + Returns: + - output: The generated response from the model based on the query. + """ + + # Initialize the OpenAI client with a custom chat completion parser + openai_client = OpenAIClient(chat_completion_parser=func) + + # Define a sample query (user question) + query = "What is the capital of France?" + + # Set the model type to LLM (Large Language Model) + model_type = ModelType.LLM + + # Create the prompt by formatting the user query as a conversation + prompt = f"User: {query}\n" + + # Define any additional parameters needed for the model (e.g., the input string) + prompt_kwargs = { + "input_str": "What is the capital of France?", + } + + # Initialize the Generator with the OpenAI client and model parameters + generator = Generator(model_client=openai_client, model_kwargs=model_kwargs) + + # Execute the generator to get a response for the prompt (using the defined prompt_kwargs) + output = generator(prompt_kwargs=prompt_kwargs) + + # Return the generated output (response from the LLM) + return output + +Function: ``run_utils_functions`` + +This function runs a series of utility functions using different model configurations for generating responses. It demonstrates how to check OpenAI model outputs using various utility functions. + +.. 
code-block:: python + + def run_utils_functions(): + """ + This function runs a series of utility functions using different model + configurations for generating responses. It demonstrates how to check + OpenAI model outputs using various utility functions. + """ + + # Define the model arguments for the probability-based function (with logprobs) + probability_model_kwargs = { + "model": "gpt-3.5-turbo", # Specify the model version + "logprobs": True, # Enable logprobs to get probability distributions for tokens + "n": 2, # Request 2 different completions for each query + } + + # Define general model arguments for most other functions + model_kwargs = { + "model": "gpt-3.5-turbo", # Specify the model version + "temperature": 0.5, # Control the randomness of responses (0 is deterministic) + "max_tokens": 100, # Set the maximum number of tokens (words) in the response + } + + # List of functions to run with corresponding model arguments + func_list = [ + [ + get_probabilities, + probability_model_kwargs, + ], # Function to get probabilities with specific kwargs + [ + get_first_message_content, + model_kwargs, + ], # Function to get first message content + [ + get_all_messages_content, + model_kwargs, + ], # Function to get all messages content in multi-chat scenarios + ] + + # Loop through each function and its corresponding arguments + for each_func in func_list: + # Check the function output using the specified arguments + result = check_openai_additional_utils(each_func[0], each_func[1]) + + # Print the function and result for debugging purposes + print(f"Function: {each_func[0].__name__}, Model Args: {each_func[1]}") + print(f"Result: {result}") + +Running the Utility Functions + +To execute the utility functions, we call the ``run_utils_functions()`` method, which runs the defined functions and prints their results. + +.. code-block:: python + + run_utils_functions() + +Purpose and Usage +These utilities (``get_first_message_content``, ``get_all_messages_content``, and ``get_probabilities``) allow users to extract specific information from the OpenAI LLM responses, such as individual message contents in a chat or the probability distribution over tokens. + + +Groq LLM Chat - Multichat Usage +------------------------------------------------- + +Note: Groq doesnt have embedder method to get embeddings like openai + +The following example demonstrates how to set up a multi-turn conversation with the Groq LLM using the ``GroqAPIClient``. + +.. code-block:: python + + from adalflow.components.model_client import GroqAPIClient + from adalflow.core.types import ModelType + from adalflow.utils import setup_env + from typing import List, Dict + +ChatConversation Class + +This class handles the conversation flow by interacting with the Groq model, keeping track of the conversation history, and generating responses. + +.. code-block:: python + + class ChatConversation: + def __init__(self): + """ + Initialize a new ChatConversation object. + - GroqAPIClient is used to interact with the Groq model. + - conversation_history keeps track of the conversation between the user and assistant. + - model_kwargs contains the model parameters like temperature and max tokens. 
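            - Note: the history is a single string that is re-sent in full on every call, so very long chats can eventually exceed the model's context window.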
+ """ + self.groq_client = ( + GroqAPIClient() + ) # Initialize GroqAPIClient for model interaction + self.conversation_history: str = ( + "" # Initialize conversation history as an empty string + ) + self.model_kwargs = { + "model": "llama3-8b-8192", # Specify the model to use + "temperature": 0.5, # Set the temperature for response variability + "max_tokens": 100, # Limit the number of tokens in the response + } + + def add_user_message(self, message: str): + """ + Add a user message to the conversation history in the required format. + The message is wrapped with tags for better processing by the assistant. + """ + self.conversation_history += ( + f" {message} " # Append user message to history + ) + + def add_assistant_message(self, message: str): + """ + Add an assistant message to the conversation history in the required format. + The message is wrapped with tags for better processing. + """ + self.conversation_history += ( + f" {message} " # Append assistant message to history + ) + + def get_response(self) -> str: + """ + Generate a response from the assistant based on the conversation history. + - Converts the conversation history and model kwargs into the format required by the Groq API. + - Calls the API to get the response. + - Parses and adds the assistant's reply to the conversation history. + """ + # Prepare the request for the Groq API, converting the inputs into the correct format + api_kwargs = self.groq_client.convert_inputs_to_api_kwargs( + input=self.conversation_history, # Use the conversation history as input + model_kwargs=self.model_kwargs, # Include model-specific parameters + model_type=ModelType.LLM, # Specify the model type (Large Language Model) + ) + print(f"api_kwargs: {api_kwargs}") # Log the API request parameters + + # Call the Groq model API to get the response + response = self.groq_client.call( + api_kwargs=api_kwargs, + model_type=ModelType.LLM, # Specify the model type again for clarity + ) + print("response: ", response) # Log the API response + + # Parse the response to extract the assistant's reply + response_text = self.groq_client.parse_chat_completion(response) + + # Add the assistant's message to the conversation history + self.add_assistant_message(response_text) + + # Return the assistant's response text + return response_text + +Example Multi-Turn Conversation + +The following function simulates a multi-turn conversation, where the user asks a series of questions and the assistant responds. It demonstrates how user inputs are processed, and responses are generated while maintaining the conversation history. + +.. code-block:: python + + def check_chat_conversation(): + """ + This function simulates a multi-turn conversation between a user and an assistant. + It demonstrates how user inputs are processed, and the assistant generates responses, + while maintaining the conversation history for each query. 
+ """ + # Initialize the ChatConversation object + chat = ChatConversation() # This creates an instance of the ChatConversation class + + # Define a list of user questions for a multi-turn conversation + questions = [ + "What is the capital of France?", # First user question + "What is its population?", # Second user question + "Tell me about its famous landmarks", # Third user question + ] + + # Loop through each question and get the assistant's response + for question in questions: + # Print the current question from the user + print(f"\nUser: {question}") + + # Add the user's message to the conversation history + chat.add_user_message(question) + + # Get the assistant's response based on the conversation history + response = chat.get_response() + + # Print the assistant's response + print(f"Assistant: {response}") + + # After the conversation, print the full conversation history + print("\nFull Conversation History:") + print( + chat.conversation_history + ) # This will print all messages (user and assistant) in the conversation history + +Run the following to use groq_client multichat ability + +.. code-block:: python + + check_chat_conversation() + +Groq LLM Chat - Multichat Usage - Asynchronous +------------------------------------------------- + +This example demonstrates how to perform multi-turn conversations with the Groq LLM using asynchronous calls for each query. It uses Python's `asyncio` to handle multiple independent requests concurrently. + +.. code-block:: python + + import asyncio + from adalflow.components.model_client import GroqAPIClient + from adalflow.core.types import ModelType + from typing import List + +ChatConversation Class + +This class allows you to interact asynchronously with the Groq model. The get_response method fetches responses from the model for a single user input asynchronously. + +.. 
code-block:: python + + class ChatConversation: + def __init__(self): + # Using an asynchronous client for communication with GroqAPI + self.groq_client = GroqAPIClient() # Create an instance of GroqAPIClient + # Model configuration parameters (e.g., Llama model with 8b parameters and 8192 context length) + self.model_kwargs = { + "model": "llama3-8b-8192", # Llama model with specific size + "temperature": 0.5, # Degree of randomness in the model's responses + "max_tokens": 100, # Maximum number of tokens in the response + } + + async def get_response(self, message: str) -> str: + """Get response from the model for a single message asynchronously""" + + # Convert the user input message to the appropriate format for the Groq API + api_kwargs = self.groq_client.convert_inputs_to_api_kwargs( + input=message, # User's input message + model_kwargs=self.model_kwargs, # Model parameters + model_type=ModelType.LLM, # Model type for large language models (LLM) + ) + print(f"api_kwargs: {api_kwargs}") # Print the API arguments for debugging + + # Asynchronously call the Groq API with the provided API arguments + response = await self.groq_client.acall( + api_kwargs=api_kwargs, # Pass the API arguments + model_type=ModelType.LLM, # Specify the model type + ) + print("response: ", response) # Print the API response for debugging + + # Parse the response to extract the assistant's reply from the API response + response_text = self.groq_client.parse_chat_completion(response) + return response_text # Return the assistant's response text + +Example Asynchronous Multi-Turn Conversation + +The following function demonstrates how multiple independent questions are handled asynchronously. Each question is processed concurrently, and their responses are gathered using asyncio.gather. + +.. code-block:: python + + async def check_chat_conversations(): + # Create an instance of ChatConversation + chat = ChatConversation() + + # List of unrelated questions for independent async calls + questions = [ + "What is the capital of France?", + "Is dog a wild animal ?", + "Tell me about amazon forest", + ] + + # Run each question as an independent asynchronous task + tasks = [chat.get_response(question) for question in questions] + # Gather all the responses concurrently + responses = await asyncio.gather(*tasks) + + # Display each response alongside the question + for question, response in zip(questions, responses): + print(f"\nUser: {question}") + print(f"Assistant: {response}") + +To execute the function, run the following: + +.. code-block:: python + + # Run the asynchronous function if in a file + # asyncio.run(check_chat_conversations()) + + await check_chat_conversations() + +Groq LLM Chat - Multichat Usage - Benchmark sync() vs async() +----------------------------------------------------------------- + +This example demonstrates how to benchmark the synchronous ``.call()`` method versus the asynchronous ``.acall()`` method for making API calls using Groq. The benchmark compares the time taken to execute multiple API requests synchronously and asynchronously. + + +.. code-block:: python + + import asyncio + import time + from adalflow.components.model_client import ( + GroqAPIClient, + ) # Assuming GroqAPI with .call() and .acall() is available + from adalflow.core.types import ModelType + +Initialization + +The following code initializes the Groq client and sets up the sample prompt and model parameters for testing. + +.. 
code-block:: python + + # Initialize the Groq client + groq_client = GroqAPIClient() + + # Sample prompt for testing + prompt = "Tell me a joke." + + model_kwargs = {"model": "llama3-8b-8192", "temperature": 0.5, "max_tokens": 100} + +Benchmarking Synchronous `.call()` Method + +This function benchmarks the synchronous `.call()` method by calling the Groq API synchronously multiple times. + +.. code-block:: python + + # Synchronous function for benchmarking .call() + def benchmark_sync_call(api_kwargs, runs=10): + # List to store responses from each synchronous call + responses = [] + + # Record the start time for benchmarking + start_time = time.time() + + # Perform synchronous API calls in a loop + responses = [ + groq_client.call( # Calling the API synchronously + api_kwargs=api_kwargs, # Passing the API arguments + model_type=ModelType.LLM, # Defining the model type + ) + for _ in range(runs) # Repeat the call 'runs' times + ] + + # Record the end time after all calls are completed + end_time = time.time() + + # Print out the response from each synchronous call + for i, response in enumerate(responses): + print(f"sync call {i + 1} completed: {response}") + + # Print the total time taken for the synchronous benchmark + print(f"\nSynchronous benchmark completed in {end_time - start_time:.2f} seconds") + +Benchmarking Asynchronous ``.acall()`` Method + +This asynchronous function benchmarks the ``.acall()`` method by calling the Groq API asynchronously multiple times using asyncio.gather() to execute tasks concurrently. + +.. code-block:: python + + # Asynchronous function for benchmarking .acall() + async def benchmark_async_acall(api_kwargs, runs=10): + # Record the start time for benchmarking + start_time = time.time() + + # Create a list of tasks for asynchronous API calls + tasks = [ + groq_client.acall( # Calling the API asynchronously + api_kwargs=api_kwargs, # Passing the API arguments + model_type=ModelType.LLM, # Defining the model type + ) + for _ in range(runs) # Repeat the call 'runs' times + ] + + # Await the completion of all tasks concurrently + responses = await asyncio.gather( + *tasks + ) # Gather all the responses from asynchronous calls + + # Record the end time after all asynchronous calls are completed + end_time = time.time() + + # Print out the response from each asynchronous call + for i, response in enumerate(responses): + print(f"Async call {i + 1} completed: {response}") + + # Print the total time taken for the asynchronous benchmark + print(f"\nAsynchronous benchmark completed in {end_time - start_time:.2f} seconds") + +Running the Benchmarks + +The following code sets up the API arguments and runs both the synchronous and asynchronous benchmarks. + +.. code-block:: python + + api_kwargs = groq_client.convert_inputs_to_api_kwargs( + input=prompt, model_kwargs=model_kwargs, model_type=ModelType.LLM + ) + + # Run both benchmarks + print("Starting synchronous benchmark...\n") + benchmark_sync_call(api_kwargs) + + print("\nStarting asynchronous benchmark...\n") + await benchmark_async_acall(api_kwargs) + +Building Custom Model client +------------------------------------------------- + +Building a Synchronous api call + +Note: I am using openai api as a example to build custom model client +in adalflow. Even though its already there in adalflow repo below +code will definitly be a starter code whom ever wants to build a +custom model client + +.. 
code-block:: python + + # Building simple custom third party model client and using it + # I have modified convert_inputs_to_api_kwargs() to make sure it follows the prompt of openai and i have used appropiate + # openai api call in __call__() + + import openai + from adalflow.core.model_client import ModelClient + from adalflow.core.types import ModelType, GeneratorOutput, EmbedderOutput + from openai.types import ( + CreateEmbeddingResponse, + ) + from adalflow.components.model_client.utils import parse_embedding_response + +This class defines the custom model client. The constructor initializes the client by calling the parent class’s initializer (ModelClient), which is essential for the setup of the Adalflow framework. + +.. code-block:: python + + class SimpleCustomModelClient(ModelClient): + # Initialize the custom model client + def __init__(self): + # Call the parent class's initializer + super().__init__() + pass # Placeholder for any initialization logic if needed in the future + + # Method to convert input into API parameters for different model types (LLM or Embedder) + def convert_inputs_to_api_kwargs( + self, input=None, model_kwargs={}, model_type=ModelType.UNDEFINED + ): + """ + Convert the inputs into API arguments based on the model type. + + Args: + input (str): The input text to be processed. + model_kwargs (dict): Additional model parameters like temperature, max_tokens, etc. + model_type (ModelType): The type of model to use (LLM or Embedder). + + Returns: + dict: API arguments formatted for the specified model type. + """ + if ( + model_type == ModelType.LLM + ): # If the model type is a large language model (LLM) + return { + "model": model_kwargs[ + "model" + ], # Set the model to use (e.g., GPT-3, GPT-4) + "messages": input, # Provide the input as the message + "temperature": model_kwargs[ + "temperature" + ], # Set the temperature (creativity of the response) + "max_tokens": model_kwargs[ + "max_tokens" + ], # Max tokens to generate in the response + } + elif model_type == ModelType.EMBEDDER: # If the model type is an embedder + return { + "model": model_kwargs["model"], # Model name for embedding + "input": [input], # Provide the input in a list format for embedding + } + else: + # Raise an error if the model type is unsupported + raise ValueError(f"model_type {model_type} is not supported") + + # Method to make the actual API call to OpenAI for either completions (LLM) or embeddings + def call(self, api_kwargs={}, model_type=ModelType.UNDEFINED): + """ + Call the appropriate OpenAI API method based on the model type (LLM or Embedder). + + Args: + api_kwargs (dict): Arguments to be passed to the API call. + model_type (ModelType): The type of model (LLM or Embedder). + + Returns: + Response: The API response from OpenAI. + """ + if model_type == ModelType.LLM: # If the model type is LLM (e.g., GPT-3, GPT-4) + return openai.chat.completions.create( + **api_kwargs + ) # Call the chat API for completion + elif model_type == ModelType.EMBEDDER: # If the model type is Embedder + return openai.embeddings.create(**api_kwargs) # Call the embedding API + else: + # Raise an error if an invalid model type is passed + raise ValueError(f"Unsupported model type: {model_type}") + + # Method to parse the response from a chat completion API call + def parse_chat_completion(self, completion): + """ + Parse the response from a chat completion API call into a custom output format. + + Args: + completion: The completion response from the OpenAI API. 
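                    (This simple client stores the raw completion object as-is; extracting ``completion.choices[0].message.content`` is a common refinement.)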
+ + Returns: + GeneratorOutput: A custom data structure containing the parsed response. + """ + # Note: GeneratorOutput is a adalflow dataclass that contains the parsed completion data + return GeneratorOutput( + data=completion, # Store the raw completion data + error=None, # No error in this case + raw_response=str(completion), # Store the raw response as a string + ) + + # Method to parse the response from an embedding API call + def parse_embedding_response( + self, response: CreateEmbeddingResponse + ) -> EmbedderOutput: + """ + Parse the response from an embedding API call into a custom output format. + + Args: + response (CreateEmbeddingResponse): The response from the embedding API. + + Returns: + EmbedderOutput: A custom data structure containing the parsed embedding response. + """ + try: + # Attempt to parse the embedding response using a helper function + return parse_embedding_response(response) + except Exception as e: + # If parsing fails, return an error message with the raw response + return EmbedderOutput(data=[], error=str(e), raw_response=response) + +In below block, the custom model client is instantiated, and a query is defined for processing by both an LLM (like GPT-3.5) and an Embedder model. The API arguments are converted, and the call() method is used to fetch responses. Finally, both types of responses (LLM and Embedder) are parsed and printed. + +.. code-block:: python + + def build_custom_model_client(): + # Instantiate the custom model client (SimpleCustomModelClient) + custom_client = SimpleCustomModelClient() + + # Define the query for the model to process + query = "What is the capital of France?" + + # Set the model type for a Large Language Model (LLM) + model_type = ModelType.LLM + + # Prepare the message prompt as expected by the OpenAI chat API. + # This format is suitable for GPT-like models (e.g., gpt-3.5-turbo). 
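        # Note: the chat completions endpoint also accepts a plain string for "content"
        # (e.g. {"role": "user", "content": query}); the list-of-parts form below mirrors
        # the multimodal message format.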
+ message_prompt = [ + { + "role": "user", # Define the user role in the conversation + "content": [ + { + "type": "text", # Specify that the input is a text type + "text": query, # The actual query to be processed by the model + } + ], + } + ] + + # Print message indicating the usage of the LLM model type + print("ModelType LLM") + + # Define additional model parameters like model name, temperature, and max tokens for LLM + model_kwargs = {"model": "gpt-3.5-turbo", "temperature": 0.5, "max_tokens": 100} + + # Convert the input message and model kwargs into the required API parameters + api_kwargs = custom_client.convert_inputs_to_api_kwargs( + input=message_prompt, model_kwargs=model_kwargs, model_type=model_type + ) + + # Print the API arguments that will be passed to the call method + print(f"api_kwargs: {api_kwargs}") + + # Call the LLM model using the prepared API arguments + result = custom_client.call(api_kwargs, ModelType.LLM) + + # Print the result of the LLM model call (response from OpenAI) + print(result) + + # Parse the chat completion response and output a more structured result + response_text = custom_client.parse_chat_completion(result) + + # Print the structured response from the chat completion + print(f"response_text: {response_text}") + + # Switch to using the Embedder model type + print("ModelType EMBEDDER") + + # Define model-specific parameters for the embedding model + model_kwargs = { + "model": "text-embedding-3-small", + "dimensions": 8, + "encoding_format": "float", + } + + # Convert the input query for the embedder model + api_kwargs = custom_client.convert_inputs_to_api_kwargs( + input=query, model_kwargs=model_kwargs, model_type=ModelType.EMBEDDER + ) + + # Print the API arguments that will be passed to the embedder model + print(f"embedder api_kwargs: {api_kwargs}") + + # Call the Embedder model using the prepared API arguments + result = custom_client.call(api_kwargs, ModelType.EMBEDDER) + + # Print the result of the Embedder model call (embedding response) + print(result) + + # Parse the embedding response and output a more structured result + response_text = custom_client.parse_embedding_response(result) + + # Print the structured response from the embedding model + print(f"response_text: {response_text}") + +This is the function call that triggers the execution of the custom model client, processing the defined query and displaying results for both LLM and Embedder. + +.. code-block:: python + + build_custom_model_client() .. admonition:: API reference :class: highlight From e512584572bc57713fd8650eff880c48d1e56487 Mon Sep 17 00:00:00 2001 From: fm1320 Date: Tue, 26 Nov 2024 12:45:06 +0000 Subject: [PATCH 17/40] Run pre-commit hooks --- docs/source/tutorials/model_client.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/tutorials/model_client.rst b/docs/source/tutorials/model_client.rst index 662c03b4..438d34d3 100644 --- a/docs/source/tutorials/model_client.rst +++ b/docs/source/tutorials/model_client.rst @@ -956,7 +956,7 @@ Groq LLM Chat - Multichat Usage Note: Groq doesnt have embedder method to get embeddings like openai -The following example demonstrates how to set up a multi-turn conversation with the Groq LLM using the ``GroqAPIClient``. +The following example demonstrates how to set up a multi-turn conversation with the Groq LLM using the ``GroqAPIClient``. .. 
code-block:: python From 04ed18fc2d4632d18ac2aaf99b36eb5fb956a0d1 Mon Sep 17 00:00:00 2001 From: Li Yin Date: Tue, 26 Nov 2024 09:33:11 -0800 Subject: [PATCH 18/40] rebase to main --- adalflow/adalflow/components/model_client/bedrock_client.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/adalflow/adalflow/components/model_client/bedrock_client.py b/adalflow/adalflow/components/model_client/bedrock_client.py index b2d5b549..d25b48bc 100644 --- a/adalflow/adalflow/components/model_client/bedrock_client.py +++ b/adalflow/adalflow/components/model_client/bedrock_client.py @@ -34,11 +34,6 @@ def get_first_message_content(completion: Dict) -> str: "get_first_message_content", "bedrock_runtime_exceptions", ] -__all__ = [ - "BedrockAPIClient", - "get_first_message_content", - "bedrock_runtime_exceptions", -] class BedrockAPIClient(ModelClient): From 2ed293b7c79075af7e0dddf2c3608d11fe2e385e Mon Sep 17 00:00:00 2001 From: fm1320 Date: Wed, 27 Nov 2024 12:00:27 +0000 Subject: [PATCH 19/40] rag playbook and linter --- docs/source/tutorials/rag_playbook.rst | 8 +- .../tutorials/adalflow_rag_playbook.ipynb | 522 ++++++++++++++++++ 2 files changed, 525 insertions(+), 5 deletions(-) create mode 100644 notebooks/tutorials/adalflow_rag_playbook.ipynb diff --git a/docs/source/tutorials/rag_playbook.rst b/docs/source/tutorials/rag_playbook.rst index 685bb4ea..9175a09f 100644 --- a/docs/source/tutorials/rag_playbook.rst +++ b/docs/source/tutorials/rag_playbook.rst @@ -1,11 +1,9 @@ -.. -.. Try Quickstart in Colab -.. - .. raw:: html
- + + Try RAG playbook in Colab + GitHub Open Source Code diff --git a/notebooks/tutorials/adalflow_rag_playbook.ipynb b/notebooks/tutorials/adalflow_rag_playbook.ipynb new file mode 100644 index 00000000..27c6bda0 --- /dev/null +++ b/notebooks/tutorials/adalflow_rag_playbook.ipynb @@ -0,0 +1,522 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Adalflow RAG Playbook example\n", + "\n", + "There are different patterns to build a RAG:\n", + "\n", + "- RAG with separate data process pipeline and a RAG task pipeline. This fits into a scenario where there is lots of data in production database, and we preprocess the data to embeddings and then we build a RAG task pipeline that retrieves context in multiple stages.\n", + "\n", + "- RAG with dynamic data access and caching the embedding dynamically in a local storage.\n", + "\n", + "Here we will have have a look at an example with a local DB using FAISS" + ], + "metadata": { + "id": "lLGpv1fLLIjF" + } + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "sfKEfaYC3Go7" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()\n" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "os.environ['GROQ_API_KEY'] = groq_api_key\n", + "\n", + "print(\"API keys have been set.\")\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "-4c_AGBt3PlR", + "outputId": "a36f157b-0b18-4f3d-d5a8-09aa94743922" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "Please enter your GROQ API key: ··········\n", + "API keys have been set.\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from typing import Any, List, Optional\n", + "import os\n", + "from adalflow.core import Component, Generator, Embedder, Sequential\n", + "from adalflow.core.types import Document, ModelClientType\n", + "from adalflow.core.string_parser import JsonParser\n", + "from adalflow.core.db import LocalDB\n", + "from adalflow.utils import setup_env\n", + "from adalflow.components.retriever.faiss_retriever import FAISSRetriever\n", + "from adalflow.components.data_process import (\n", + " RetrieverOutputToContextStr,\n", + " ToEmbeddings,\n", + " TextSplitter,\n", + ")\n", + "from adalflow.utils.global_config import get_adalflow_default_root_path\n" + ], + "metadata": { + "id": "V9LsGDnm3RbV" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "configs = {\n", + " \"embedder\": {\n", + " \"batch_size\": 100,\n", + " \"model_kwargs\": {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 256,\n", + " \"encoding_format\": \"float\",\n", + " },\n", + " },\n", + " \"retriever\": {\n", + " \"top_k\": 5,\n", + " },\n", + " \"generator\": {\n", + " \"model_client\": 
ModelClientType.OPENAI(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"temperature\": 0.3,\n", + " \"stream\": False,\n", + " },\n", + " },\n", + " \"text_splitter\": {\n", + " \"split_by\": \"word\",\n", + " \"chunk_size\": 400,\n", + " \"chunk_overlap\": 200,\n", + " },\n", + "}\n" + ], + "metadata": { + "id": "kWGTZxrw3Tli" + }, + "execution_count": 5, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def prepare_data_pipeline():\n", + " splitter = TextSplitter(**configs[\"text_splitter\"])\n", + " embedder = Embedder(\n", + " model_client=ModelClientType.OPENAI(),\n", + " model_kwargs=configs[\"embedder\"][\"model_kwargs\"],\n", + " )\n", + " embedder_transformer = ToEmbeddings(\n", + " embedder=embedder, batch_size=configs[\"embedder\"][\"batch_size\"]\n", + " )\n", + " data_transformer = Sequential(splitter, embedder_transformer)\n", + " return data_transformer\n", + "\n", + "def prepare_database_with_index(\n", + " docs: List[Document],\n", + " index_file: str = \"index.faiss\",\n", + " index_path: Optional[str] = None,\n", + "):\n", + " index_path = index_path or get_adalflow_default_root_path()\n", + " index_path = os.path.join(index_path, index_file)\n", + " if os.path.exists(index_path):\n", + " return None\n", + " db = LocalDB()\n", + " db.load(docs)\n", + " data_transformer = prepare_data_pipeline()\n", + " db.transform(data_transformer, key=\"data_transformer\")\n", + " db.save_state(index_path)\n" + ], + "metadata": { + "id": "1QE0PCKs4BLz" + }, + "execution_count": 6, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "RAG_PROMPT_TEMPLATE = r\"\"\"\n", + "{{task_desc}}\n", + "\n", + "\n", + "{{input_str}}\n", + "{{context_str}}\n", + "\n", + "\"\"\"\n", + "\n", + "rag_prompt_task_desc = r\"\"\"\n", + "You are a helpful assistant.\n", + "\n", + "Your task is to answer the query that may or may not come with context information.\n", + "When context is provided, you should stick to the context and less on your prior knowledge to answer the query.\n", + "\n", + "Output JSON format:\n", + "{\n", + " \"answer\": \"The answer to the query\",\n", + "}\"\"\"\n", + "\n", + "class RAG(Component):\n", + " def __init__(\n", + " self,\n", + " index_file: str = \"index.faiss\",\n", + " index_path: Optional[str] = None,\n", + " configs: dict = configs,\n", + " ):\n", + " super().__init__()\n", + "\n", + " index_path = index_path or get_adalflow_default_root_path()\n", + " index_path = os.path.join(index_path, index_file)\n", + " self.index_path = index_path\n", + "\n", + " if not os.path.exists(index_path):\n", + " self.db = LocalDB()\n", + " self.register_data_transformer()\n", + " self.transformed_docs = []\n", + " else:\n", + " self.db = LocalDB.load_state(index_path)\n", + " self.transformed_docs = self.db.get_transformed_data(\"data_transformer\")\n", + "\n", + " embedder = Embedder(\n", + " model_client=ModelClientType.OPENAI(),\n", + " model_kwargs=configs[\"embedder\"][\"model_kwargs\"],\n", + " )\n", + "\n", + " self.retriever = FAISSRetriever(\n", + " **configs[\"retriever\"],\n", + " embedder=embedder,\n", + " documents=self.transformed_docs,\n", + " document_map_func=lambda doc: doc.vector,\n", + " )\n", + " self.retriever_output_processors = RetrieverOutputToContextStr(deduplicate=True)\n", + "\n", + " self.generator = Generator(\n", + " **configs[\"generator\"],\n", + " prompt_kwargs={\"task_desc_str\": rag_prompt_task_desc},\n", + " output_processors=JsonParser(),\n", + " )\n", + "\n", + " def 
register_data_transformer(self):\n", + " if \"data_transformer\" not in self.db.get_transformer_keys():\n", + " data_transformer = prepare_data_pipeline()\n", + " self.db.register_transformer(data_transformer, key=\"data_transformer\")\n", + " print(\"Data transformer registered\")\n", + "\n", + " def add_documents(self, docs: List[Document]):\n", + " self.db.extend(docs, apply_transformer=True)\n", + " self.db.save_state(self.index_path)\n", + "\n", + " def get_transformed_docs(self, filter_func=None):\n", + " return self.db.get_transformed_data(\"data_transformer\", filter_func)\n", + "\n", + " def prepare_retriever(self, filter_func=None):\n", + " self.transformed_docs = self.get_transformed_docs(filter_func)\n", + " self.retriever.build_index_from_documents(\n", + " self.transformed_docs, document_map_func=lambda doc: doc.vector\n", + " )\n", + "\n", + " def generate(self, query: str, context: Optional[str] = None) -> Any:\n", + " if not self.generator:\n", + " raise ValueError(\"Generator is not set\")\n", + " prompt_kwargs = {\"context_str\": context, \"input_str\": query}\n", + " response = self.generator(prompt_kwargs=prompt_kwargs)\n", + " return response, context\n", + "\n", + " def call(self, query: str, verbose: bool = False) -> Any:\n", + " retrieved_documents = self.retriever(query)\n", + " for i, retriever_output in enumerate(retrieved_documents):\n", + " retrieved_documents[i].documents = [\n", + " self.transformed_docs[doc_index]\n", + " for doc_index in retriever_output.doc_indices\n", + " ]\n", + " if verbose:\n", + " print(f\"retrieved_documents: \\n {retrieved_documents}\")\n", + "\n", + " context_str = self.retriever_output_processors(retrieved_documents)\n", + " if verbose:\n", + " print(f\"context_str: \\n {context_str}\")\n", + "\n", + " return self.generate(query, context=context_str)\n" + ], + "metadata": { + "id": "6Mu1HXhy4DIG" + }, + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "# Prepare initial documents\n", + "doc1 = Document(\n", + " meta_data={\"title\": \"Li Yin's profile\"},\n", + " text=\"My name is Li Yin, I love rock climbing\" + \"lots of nonsense text\" * 500,\n", + " id=\"doc1\",\n", + ")\n", + "doc2 = Document(\n", + " meta_data={\"title\": \"Interviewing Li Yin\"},\n", + " text=\"lots of more nonsense text\" * 250\n", + " + \"Li Yin is an AI researcher and a software engineer\"\n", + " + \"lots of more nonsense text\" * 250,\n", + " id=\"doc2\",\n", + ")\n", + "\n", + "# Prepare the database (only runs once)\n", + "prepare_database_with_index([doc1, doc2], index_file=\"index.faiss\")\n", + "\n", + "# Initialize RAG\n", + "rag = RAG(index_file=\"index.faiss\")\n", + "print(rag)\n", + "\n", + "# Query the RAG system\n", + "query = \"What is Li Yin's hobby and profession?\"\n", + "response = rag.call(query)\n", + "print(f\"Response: {response}\")\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "sPnx4PY34D1j", + "outputId": "f66d6f1a-70bf-40e9-a160-591fcfdcbed3" + }, + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "Splitting Documents in Batches: 100%|██████████| 1/1 [00:00<00:00, 109.58it/s]\n", + "Batch embedding documents: 100%|██████████| 1/1 [00:01<00:00, 1.33s/it]\n", + "Adding embeddings to documents from batch: 1it [00:00, 6462.72it/s]\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Saved the state of the DB to /root/.adalflow/index.faiss\n", + "RAG(\n", + " (db): 
LocalDB(name='LocalDB', items=[Document(id=doc1, text='My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...', meta_data={'title': \"Li Yin's profile\"}, vector=[], parent_doc_id=None, order=None, score=None), Document(id=doc2, text='lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...', meta_data={'title': 'Interviewing Li Yin'}, vector=[], parent_doc_id=None, order=None, score=None)], transformed_items={'data_transformer': [Document(id=59f7f6ad-eb4c-4fdb-8d04-6dba1ee439bc, text='My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=0, score=None), Document(id=2486725e-47ff-4978-84fc-7937778b0e45, text='textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nons...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=1, score=None), Document(id=96993047-4cff-436d-b8ac-e02da4ae7fec, text='nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlot...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=2, score=None), Document(id=77742f90-0c0c-4143-802d-3557577d4935, text='of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense text...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=3, score=None), Document(id=81ba770e-c5f2-4dc5-98fc-349ab9143ef9, text='textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nons...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=4, score=None), Document(id=dff6f5e3-5929-4e3c-ba5f-79f5116c1fa3, text='nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlot...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=5, score=None), Document(id=1e7888e2-0783-40b2-ab85-067e3ba71fad, text='of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense text...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=6, score=None), Document(id=2deb945f-dfb9-46d3-a60b-dae77e2f5fd8, text='lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=0, score=None), Document(id=3d9c21aa-d583-47fe-b143-710b4bc4a8b2, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=1, score=None), Document(id=a318ffea-2542-4493-ab2d-03d10a94e860, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=2, score=None), Document(id=b5c05820-7545-43a8-a4a3-691c5ccc79d1, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=3, score=None), Document(id=a739cd3e-8826-4e74-afa9-499498115621, text='textlots of more nonsense textlots of 
more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=4, score=None), Document(id=7153cde2-b6ee-4485-91e9-9de2f4bd45ab, text='textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsens...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=5, score=None), Document(id=c3f3ed48-acc2-41b5-b4ac-a6107b651789, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=6, score=None), Document(id=7bfd84e6-0025-4cfa-8c0a-63c9de9a8d4a, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=7, score=None), Document(id=8bece98d-65f0-4dd1-9407-d1c54413bef4, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=8, score=None), Document(id=cf9ab236-af73-4af6-9302-b3c7ffdd9ca7, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=9, score=None)]}, transformer_setups={'data_transformer': Sequential(\n", + " (0): TextSplitter(split_by=word, chunk_size=400, chunk_overlap=200)\n", + " (1): ToEmbeddings(\n", + " batch_size=100\n", + " (embedder): Embedder(\n", + " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", + " (model_client): OpenAIClient()\n", + " )\n", + " (batch_embedder): BatchEmbedder(\n", + " (embedder): Embedder(\n", + " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", + " (model_client): OpenAIClient()\n", + " )\n", + " )\n", + " )\n", + " )}, mapper_setups={}, index_path='/root/.adalflow/index.faiss')\n", + " (retriever): FAISSRetriever(\n", + " top_k=5, metric=prob, dimensions=256, total_documents=17\n", + " (embedder): Embedder(\n", + " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", + " (model_client): OpenAIClient()\n", + " )\n", + " )\n", + " (retriever_output_processors): RetrieverOutputToContextStr(deduplicate=True)\n", + " (generator): Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo', 'temperature': 0.3, 'stream': False}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(\n", + " template: \n", + " {# task desc #}\n", + " {% if task_desc_str %}\n", + " {{task_desc_str}}\n", + " {% else %}\n", + " You are a helpful assistant.\n", + " {% endif %}\n", + " {#input format#}\n", + " {% if input_format_str %}\n", + " \n", + " {{input_format_str}}\n", + " \n", + " {% endif %}\n", + " {# output format #}\n", + " {% if output_format_str %}\n", + " \n", + " {{output_format_str}}\n", + " \n", + " {% endif %}\n", + " {# tools #}\n", + " {% if tools_str %}\n", + " \n", + " {{tools_str}}\n", + " \n", + " {% endif %}\n", + " {# example #}\n", + " {% if examples_str %}\n", + " \n", + " {{examples_str}}\n", + " \n", + " {% endif %}\n", + " {# chat history #}\n", + " {% if chat_history_str %}\n", + " \n", + " {{chat_history_str}}\n", + " \n", + " {% endif %}\n", + " {#contex#}\n", + " 
{% if context_str %}\n", + " \n", + " {{context_str}}\n", + " \n", + " {% endif %}\n", + " \n", + " \n", + " {% if input_str %}\n", + " {{input_str}}\n", + " {% endif %}\n", + " \n", + " {# steps #}\n", + " {% if steps_str %}\n", + " \n", + " {{steps_str}}\n", + " \n", + " {% endif %}\n", + " , prompt_kwargs: {'task_desc_str': '\\nYou are a helpful assistant.\\n\\nYour task is to answer the query that may or may not come with context information.\\nWhen context is provided, you should stick to the context and less on your prior knowledge to answer the query.\\n\\nOutput JSON format:\\n{\\n \"answer\": \"The answer to the query\",\\n}'}, prompt_variables: ['examples_str', 'context_str', 'chat_history_str', 'tools_str', 'task_desc_str', 'input_str', 'input_format_str', 'output_format_str', 'steps_str']\n", + " )\n", + " (model_client): OpenAIClient()\n", + " (output_processors): JsonParser()\n", + " )\n", + ")\n", + "Response: (GeneratorOutput(id=None, data={'answer': \"Li Yin's hobby is rock climbing and profession is an AI researcher and a software engineer.\"}, error=None, usage=CompletionUsage(completion_tokens=25, prompt_tokens=2713, total_tokens=2738), raw_response='{\\n \"answer\": \"Li Yin\\'s hobby is rock climbing and profession is an AI researcher and a software engineer.\"\\n}', metadata=None), ' My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense 
textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more 
nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of 
nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots 
of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense ')\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "# Add more documents at runtime\n", + "doc3 = Document(\n", + " meta_data={\"title\": \"Apple's profile\"},\n", + " text=\"Apple is a cute dog with black and tan fur\" + \"lots of nonsense text\" * 500,\n", + " id=\"doc3\",\n", + ")\n", + "doc4 = Document(\n", + " meta_data={\"title\": \"Apple's characteristics\"},\n", + " text=\"lots of more nonsense text\" * 250\n", + " + \"Apple is energetic, loves to play with her monkey toy\"\n", + " + \"lots of more nonsense text\" * 250,\n", + " id=\"doc4\",\n", + ")\n", + "\n", + "rag.add_documents([doc3, doc4])\n", + "rag.prepare_retriever()\n", + "\n", + "# Test a new query\n", + "query = \"What is Apple's favorite toy?\"\n", + "response = rag.call(query)\n", + "print(f\"Response: {response}\")\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "bcC1-dCheVEC", + "outputId": "133bab3f-ff2e-40db-99dc-71d64af6283f" + }, + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "Splitting Documents in Batches: 100%|██████████| 1/1 [00:00<00:00, 114.76it/s]\n", + "Batch embedding documents: 100%|██████████| 1/1 [00:00<00:00, 1.35it/s]\n", + "Adding embeddings to documents from batch: 1it [00:00, 1915.21it/s]\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Saved the state of the DB to /root/.adalflow/index.faiss\n", + "Response: (GeneratorOutput(id=None, data={'answer': \"Apple's favorite toy is her monkey toy.\"}, error=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=2647, total_tokens=2663), raw_response='{\\n \"answer\": \"Apple\\'s favorite toy is her monkey toy.\"\\n}', metadata=None), ' Apple is a cute dog with black and tan furlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense 
textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots textApple is energetic, loves to play with her monkey toylots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of 
more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textApple is energetic, loves to play with her monkey toylots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more 
nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more 
nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more ')\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "# View all documents in the database\n", + "print(\"All documents in the database:\")\n", + "for item in rag.db.items:\n", + " print(f\"ID: {item.id}, Title: {item.meta_data['title']}, Text: {item.text[:100]}...\")\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "o9TzVv5GeZZ2", + "outputId": "bde56355-186c-4013-d702-b4530f82881b" + }, + "execution_count": 10, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "All documents in the database:\n", + "ID: doc1, Title: Li Yin's profile, Text: My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...\n", + "ID: doc2, Title: Interviewing Li Yin, Text: lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...\n", + "ID: doc3, Title: Apple's profile, Text: Apple is a cute dog with black and tan furlots of nonsense textlots of nonsense textlots of nonsense...\n", + "ID: doc4, Title: Apple's characteristics, Text: lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...\n" + ] + } + ] + } + ] +} From 5116487487f152788523ae681804590ff65eedb3 Mon Sep 17 00:00:00 2001 From: fm1320 Date: Thu, 28 Nov 2024 13:46:14 +0000 Subject: [PATCH 20/40] Add tutorials and linter passed --- docs/source/tutorials/logging.rst | 16 + docs/source/tutorials/tool_helper.rst | 12 + .../tutorials/adalflow_function_calls.ipynb | 737 ++++++++++++++++++ notebooks/tutorials/adalflow_logger.ipynb | 242 ++++++ tutorials/adalflow_function_calls.py | 108 +++ tutorials/adalflow_logger.py | 143 ++++ 6 files changed, 1258 insertions(+) create mode 100644 notebooks/tutorials/adalflow_function_calls.ipynb create mode 100644 notebooks/tutorials/adalflow_logger.ipynb create mode 100644 tutorials/adalflow_function_calls.py create mode 100644 tutorials/adalflow_logger.py diff --git a/docs/source/tutorials/logging.rst b/docs/source/tutorials/logging.rst index 04d31de9..1f566758 100644 --- a/docs/source/tutorials/logging.rst +++ b/docs/source/tutorials/logging.rst @@ -1,3 
+1,19 @@ +Logger Example +-------------- + +.. raw:: html + + + + Logging ==================== diff --git a/docs/source/tutorials/tool_helper.rst b/docs/source/tutorials/tool_helper.rst index 6b3736fd..4b607a26 100644 --- a/docs/source/tutorials/tool_helper.rst +++ b/docs/source/tutorials/tool_helper.rst @@ -1,3 +1,15 @@ +.. raw:: html + + + .. _tool_helper: Function calls diff --git a/notebooks/tutorials/adalflow_function_calls.ipynb b/notebooks/tutorials/adalflow_function_calls.ipynb new file mode 100644 index 00000000..622448c9 --- /dev/null +++ b/notebooks/tutorials/adalflow_function_calls.ipynb @@ -0,0 +1,737 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Function calls\n", + "\n", + "Tools are the means an LLM can use to interact with the world beyond its internal knowledge. Technically speaking, retrievers are tools that help the LLM get more relevant context, and memory is a tool that lets the LLM carry out a conversation. Deciding when, which, and how to use a tool, and even creating a tool, is agentic behavior: a function call is the process of showing the LLM a list of function definitions and prompting it to choose one or a few of them. Many places use tools and function calls interchangeably.\n", + "\n", + "In this notebook we will cover function calls, including:\n", + "\n", + "- Function call walkthrough\n", + "\n", + "- Overall design\n", + "\n", + "- Function call in action\n", + "\n", + "It follows the tutorial here: https://adalflow.sylph.ai/tutorials/tool_helper.html#" + ], + "metadata": { + "id": "lLGpv1fLLIjF" + } + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "sfKEfaYC3Go7" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()\n" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "os.environ['GROQ_API_KEY'] = groq_api_key\n", + "\n", + "print(\"API keys have been set.\")\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "-4c_AGBt3PlR", + "outputId": "21a26437-9f95-4478-84e9-ba4369956b6f" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "Please enter your GROQ API key: ··········\n", + "API keys have been set.\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from dataclasses import dataclass\n", + "from typing import List\n", + "import numpy as np\n", + "import time\n", + "import asyncio\n", + "\n", + "\n", + "\n", + "def multiply(a: int, b: int) -> int:\n", + " \"\"\"Multiply two numbers.\"\"\"\n", + " time.sleep(1)\n", + " return a * b\n", + "\n", + "\n", + "def add(a: int, b: int) -> int:\n", + " \"\"\"Add two numbers.\"\"\"\n", + " time.sleep(1)\n", + " return a + b\n", + "\n", + "\n", + "async def divide(a: float, b: float) -> float:\n", + " \"\"\"Divide two numbers.\"\"\"\n", + " await 
asyncio.sleep(1)\n", + " return float(a) / b\n", + "\n", + "\n", + "async def search(query: str) -> List[str]:\n", + " \"\"\"Search for query and return a list of results.\"\"\"\n", + " await asyncio.sleep(1)\n", + " return [\"result1\" + query, \"result2\" + query]\n", + "\n", + "\n", + "def numpy_sum(arr: np.ndarray) -> float:\n", + " \"\"\"Sum the elements of an array.\"\"\"\n", + " return np.sum(arr)\n", + "\n", + "\n", + "x = 2\n", + "\n", + "@dataclass\n", + "class Point:\n", + " x: int\n", + " y: int\n", + "\n", + "\n", + "def add_points(p1: Point, p2: Point) -> Point:\n", + " return Point(p1.x + p2.x, p1.y + p2.y)" + ], + "metadata": { + "id": "GMKuuP7xR9Nt" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## Function Tool" + ], + "metadata": { + "id": "jCA7HMjtT16P" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.func_tool import FunctionTool\n", + "\n", + "functions =[multiply, add, divide, search, numpy_sum, add_points]\n", + "tools = [\n", + " FunctionTool(fn=fn) for fn in functions\n", + "]\n", + "for tool in tools:\n", + " print(tool)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "fgOEoLoDSBqh", + "outputId": "7e636e2c-9a5d-44f1-f0fe-fe8a6bea474d" + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']}))\n", + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']}))\n", + "FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']}))\n", + "FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']}))\n", + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']}))\n", + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "print(tools[-2].definition.to_dict())" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "CYJaHFhGSEzH", + "outputId": 
"9ab36c6c-7509-4e7f-ce85-11dae889c8c2" + }, + "execution_count": 6, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{'func_name': 'numpy_sum', 'func_desc': 'numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', 'func_parameters': {'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']}}\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "context_map = {tool.definition.func_name: tool for tool in tools}" + ], + "metadata": { + "id": "_O4bQgXrSKb6" + }, + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "function_name = \"add\"\n", + "function_to_call = context_map[function_name]\n", + "function_args = {\"a\": 1, \"b\": 2}\n", + "function_response = function_to_call.call(**function_args)" + ], + "metadata": { + "id": "-RgWWMdISL1u" + }, + "execution_count": 8, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.tool_manager import ToolManager\n", + "\n", + "tool_manager = ToolManager(tools=functions)\n", + "print(tool_manager)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "6CT7Tez1SOai", + "outputId": "e486d882-9179-4db3-f077-6adfc9fc6579" + }, + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "ToolManager(Tools: [FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))], Additional Context: {})\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## ToolManager" + ], + "metadata": { + "id": "jzFqNnN_T-cu" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.tool_manager import ToolManager\n", + "\n", + "tool_manager = 
ToolManager(tools=functions)\n", + "print(tool_manager)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "JX7MibWiUF3U", + "outputId": "20707186-5ec3-49a4-d553-c3160c3daa84" + }, + "execution_count": 10, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "ToolManager(Tools: [FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))], Additional Context: {})\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Function Call end-to-end" + ], + "metadata": { + "id": "9Bw2fs--UKX7" + } + }, + { + "cell_type": "code", + "source": [ + "template = r\"\"\"You have these tools available:\n", + "{% if tools %}\n", + "\n", + "{% for tool in tools %}\n", + "{{ loop.index }}.\n", + "{{tool}}\n", + "------------------------\n", + "{% endfor %}\n", + "\n", + "{% endif %}\n", + "\n", + "{{output_format_str}}\n", + "\n", + "\n", + "User: {{input_str}}\n", + "You:\n", + "\"\"\"" + ], + "metadata": { + "id": "TywPQMIVUOqh" + }, + "execution_count": 11, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.prompt_builder import Prompt\n", + "\n", + "prompt = Prompt(template=template)\n", + "small_tool_manager = ToolManager(tools=tools[:2])\n", + "\n", + "renered_prompt = prompt(tools=small_tool_manager.yaml_definitions)\n", + "print(renered_prompt)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "-vMajeXoUQ5A", + "outputId": "ca68601b-e9c8-41c3-a6fa-777f225e68e3" + }, + "execution_count": 12, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "You have these tools available:\n", + "\n", + "1.\n", + "func_name: multiply\n", + "func_desc: 
'multiply(a: int, b: int) -> int\n", + "\n", + " Multiply two numbers.'\n", + "func_parameters:\n", + " type: object\n", + " properties:\n", + " a:\n", + " type: int\n", + " b:\n", + " type: int\n", + " required:\n", + " - a\n", + " - b\n", + "------------------------\n", + "2.\n", + "func_name: add\n", + "func_desc: 'add(a: int, b: int) -> int\n", + "\n", + " Add two numbers.'\n", + "func_parameters:\n", + " type: object\n", + " properties:\n", + " a:\n", + " type: int\n", + " b:\n", + " type: int\n", + " required:\n", + " - a\n", + " - b\n", + "------------------------\n", + "\n", + "\n", + "None\n", + "\n", + "\n", + "User: None\n", + "You:\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.types import Function\n", + "\n", + "output_data_class = Function\n", + "output_format_str = output_data_class.to_json_signature(exclude=[\"thought\", \"args\"])\n", + "\n", + "renered_prompt= prompt(output_format_str=output_format_str)\n", + "print(renered_prompt)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "V9-90IFRUUNT", + "outputId": "ed2f829e-c656-43c6-a454-8a7c32d5dafe" + }, + "execution_count": 13, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "You have these tools available:\n", + "\n", + "{\n", + " \"name\": \"The name of the function (str) (optional)\",\n", + " \"kwargs\": \"The keyword arguments of the function (Optional[Dict[str, object]]) (optional)\"\n", + "}\n", + "\n", + "\n", + "User: None\n", + "You:\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.types import FunctionExpression\n", + "\n", + "output_data_class = FunctionExpression\n", + "output_format_str = output_data_class.to_json_signature(exclude=[\"thought\"])\n", + "print(prompt(output_format_str=output_format_str))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "p3kPMhWaUYT1", + "outputId": "a3de7117-c3eb-404e-e2e7-8a5187b32f6b" + }, + "execution_count": 14, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "You have these tools available:\n", + "\n", + "{\n", + " \"action\": \"FuncName() Valid function call expression. Example: \\\"FuncName(a=1, b=2)\\\" Follow the data type specified in the function parameters.e.g. for Type object with x,y properties, use \\\"ObjectType(x=1, y=2) (str) (required)\"\n", + "}\n", + "\n", + "\n", + "User: None\n", + "You:\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.components.output_parsers import JsonOutputParser\n", + "\n", + "func_parser = JsonOutputParser(data_class=Function, exclude_fields=[\"thought\", \"args\"])\n", + "instructions = func_parser.format_instructions()\n", + "print(instructions)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "MvGyoUmMUatR", + "outputId": "e819866b-f6e3-4c88-f9f1-22d725a28865" + }, + "execution_count": 17, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Your output should be formatted as a standard JSON instance with the following schema:\n", + "```\n", + "{\n", + " \"name\": \"The name of the function (str) (optional)\",\n", + " \"kwargs\": \"The keyword arguments of the function (Optional[Dict[str, object]]) (optional)\"\n", + "}\n", + "```\n", + "-Make sure to always enclose the JSON output in triple backticks (```). 
Please do not add anything other than valid JSON output!\n", + "-Use double quotes for the keys and string values.\n", + "-DO NOT mistaken the \"properties\" and \"type\" in the schema as the actual fields in the JSON output.\n", + "-Follow the JSON formatting conventions.\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Function Output Format" + ], + "metadata": { + "id": "9W7DiGcpUme5" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.generator import Generator\n", + "from adalflow.core.types import ModelClientType\n", + "\n", + "model_kwargs = {\"model\": \"gpt-4o-mini\"}\n", + "prompt_kwargs = {\n", + " \"tools\": tool_manager.yaml_definitions,\n", + " \"output_format_str\": func_parser.format_instructions(),\n", + "}\n", + "generator = Generator(\n", + " model_client=ModelClientType.OPENAI(),\n", + " model_kwargs=model_kwargs,\n", + " template=template,\n", + " prompt_kwargs=prompt_kwargs,\n", + " output_processors=func_parser,\n", + ")" + ], + "metadata": { + "id": "z5tNhoruUp6o" + }, + "execution_count": 20, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "queries = [\n", + " \"add 2 and 3\",\n", + " \"search for something\",\n", + " \"add points (1, 2) and (3, 4)\",\n", + " \"sum numpy array with arr = np.array([[1, 2], [3, 4]])\",\n", + " \"multiply 2 with local variable x\",\n", + " \"divide 2 by 3\",\n", + " \"Add 5 to variable y\",\n", + "]\n", + "\n", + "for idx, query in enumerate(queries):\n", + " prompt_kwargs = {\"input_str\": query}\n", + " print(f\"\\n{idx} Query: {query}\")\n", + " print(f\"{'-'*50}\")\n", + " try:\n", + " result = generator(prompt_kwargs=prompt_kwargs)\n", + " # print(f\"LLM raw output: {result.raw_response}\")\n", + " func = Function.from_dict(result.data)\n", + " print(f\"Function: {func}\")\n", + " func_output = tool_manager.execute_func(func)\n", + " print(f\"Function output: {func_output}\")\n", + " except Exception as e:\n", + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "9DCukn1SUs_x", + "outputId": "dcfd952c-0699-4d79-ee6d-a59373e3c75d" + }, + "execution_count": 21, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\n", + "0 Query: add 2 and 3\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='add', args=[], kwargs={'a': 2, 'b': 3})\n", + "Function output: FunctionOutput(name='add', input=Function(thought=None, name='add', args=(), kwargs={'a': 2, 'b': 3}), parsed_input=None, output=5, error=None)\n", + "\n", + "1 Query: search for something\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='search', args=[], kwargs={'query': 'something'})\n", + "Function output: FunctionOutput(name='search', input=Function(thought=None, name='search', args=(), kwargs={'query': 'something'}), parsed_input=None, output=['result1something', 'result2something'], error=None)\n", + "\n", + "2 Query: add points (1, 2) and (3, 4)\n", + "--------------------------------------------------\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "ERROR:adalflow.core.func_tool:Error at calling : 'dict' object has no attribute 'x'\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function: Function(thought=None, name='add_points', args=[], kwargs={'p1': {'x': 1, 'y': 2}, 'p2': 
{'x': 3, 'y': 4}})\n", + "Function output: FunctionOutput(name='add_points', input=Function(thought=None, name='add_points', args=(), kwargs={'p1': {'x': 1, 'y': 2}, 'p2': {'x': 3, 'y': 4}}), parsed_input=None, output=None, error=\"'dict' object has no attribute 'x'\")\n", + "\n", + "3 Query: sum numpy array with arr = np.array([[1, 2], [3, 4]])\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='numpy_sum', args=[], kwargs={'arr': [[1, 2], [3, 4]]})\n", + "Function output: FunctionOutput(name='numpy_sum', input=Function(thought=None, name='numpy_sum', args=(), kwargs={'arr': [[1, 2], [3, 4]]}), parsed_input=None, output=10, error=None)\n", + "\n", + "4 Query: multiply 2 with local variable x\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='multiply', args=[], kwargs={'a': 2, 'b': 'x'})\n", + "Function output: FunctionOutput(name='multiply', input=Function(thought=None, name='multiply', args=(), kwargs={'a': 2, 'b': 'x'}), parsed_input=None, output='xx', error=None)\n", + "\n", + "5 Query: divide 2 by 3\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='divide', args=[], kwargs={'a': 2.0, 'b': 3.0})\n", + "Function output: FunctionOutput(name='divide', input=Function(thought=None, name='divide', args=(), kwargs={'a': 2.0, 'b': 3.0}), parsed_input=None, output=0.6666666666666666, error=None)\n", + "\n", + "6 Query: Add 5 to variable y\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='add', args=[], kwargs={'a': 5, 'b': 'y'})\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "ERROR:adalflow.core.func_tool:Error at calling : unsupported operand type(s) for +: 'int' and 'str'\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Function output: FunctionOutput(name='add', input=Function(thought=None, name='add', args=(), kwargs={'a': 5, 'b': 'y'}), parsed_input=None, output=None, error=\"unsupported operand type(s) for +: 'int' and 'str'\")\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## FunctionExpression Output Format" + ], + "metadata": { + "id": "O-sBTPATUwsD" + } + }, + { + "cell_type": "code", + "source": [ + "tool_manager = ToolManager(\n", + " tools=functions,\n", + " additional_context={\"x\": x, \"y\": 0, \"np.array\": np.array, \"np\": np},\n", + ")\n", + "func_parser = JsonOutputParser(data_class=FunctionExpression)" + ], + "metadata": { + "id": "TVRZ44N1UyWg" + }, + "execution_count": 22, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "context = r\"\"\"\n", + "Your function expression also have access to these context:\n", + "{{context_str}}\n", + "\n", + "\"\"\"" + ], + "metadata": { + "id": "9h47p4XpU2BC" + }, + "execution_count": 23, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "async def run_async_function_call(self, generator, tool_manager):\n", + " answers = []\n", + " start_time = time.time()\n", + " tasks = []\n", + " for idx, query in enumerate(queries):\n", + " tasks.append(self.process_query(idx, query, generator, tool_manager))\n", + "\n", + " results = await asyncio.gather(*tasks)\n", + " answers.extend(results)\n", + " end_time = time.time()\n", + " print(f\"Total time taken: {end_time - start_time :.2f} seconds\")\n", + " return answers\n", + "\n", + "async def process_query(self, idx, query, generator, tool_manager: ToolManager):\n", + " 
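The `add_points` call above fails because the JSON parser hands the tool plain dicts such as {'x': 1, 'y': 2} rather than `Point` instances; the notebook's own remedy is the `FunctionExpression` format with `additional_context`, covered next. Purely as an illustration, here is a minimal stand-alone sketch, not part of the AdalFlow API, of coercing dict keyword arguments back into the annotated dataclasses before calling the function (the helper name `call_with_dataclass_coercion` is a made-up example):

```python
# Hypothetical workaround sketch (not AdalFlow API): convert dict kwargs into the
# dataclass types declared in the function signature before invoking the tool.
import dataclasses
import inspect
from dataclasses import dataclass


@dataclass
class Point:
    x: int
    y: int


def add_points(p1: Point, p2: Point) -> Point:
    return Point(p1.x + p2.x, p1.y + p2.y)


def call_with_dataclass_coercion(fn, **kwargs):
    """Coerce plain-dict arguments into the dataclasses annotated on fn."""
    params = inspect.signature(fn).parameters
    coerced = {}
    for name, value in kwargs.items():
        annotation = params[name].annotation if name in params else None
        if dataclasses.is_dataclass(annotation) and isinstance(value, dict):
            coerced[name] = annotation(**value)  # {'x': 1, 'y': 2} -> Point(x=1, y=2)
        else:
            coerced[name] = value
    return fn(**coerced)


print(call_with_dataclass_coercion(add_points, p1={"x": 1, "y": 2}, p2={"x": 3, "y": 4}))
# Point(x=4, y=6)
```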
print(f\"\\n{idx} Query: {query}\")\n", + " print(f\"{'-'*50}\")\n", + " try:\n", + " result = generator(prompt_kwargs={\"input_str\": query})\n", + " func_expr = FunctionExpression.from_dict(result.data)\n", + " print(f\"Function_expr: {func_expr}\")\n", + " func = tool_manager.parse_func_expr(func_expr)\n", + " func_output = await tool_manager.execute_func_async(func)\n", + " print(f\"Function output: {func_output}\")\n", + " return func_output\n", + " except Exception as e:\n", + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )\n", + " return None" + ], + "metadata": { + "id": "n9Qq7wcOU4X9" + }, + "execution_count": 24, + "outputs": [] + } + ] +} diff --git a/notebooks/tutorials/adalflow_logger.ipynb b/notebooks/tutorials/adalflow_logger.ipynb new file mode 100644 index 00000000..135d6450 --- /dev/null +++ b/notebooks/tutorials/adalflow_logger.ipynb @@ -0,0 +1,242 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Adalflow RAG Playbook example\n", + "\n", + "There are different patterns to build a RAG:\n", + "\n", + "- RAG with separate data process pipeline and a RAG task pipeline. This fits into a scenario where there is lots of data in production database, and we preprocess the data to embeddings and then we build a RAG task pipeline that retrieves context in multiple stages.\n", + "\n", + "- RAG with dynamic data access and caching the embedding dynamically in a local storage.\n", + "\n", + "Here we will have have a look at an example with a local DB using FAISS" + ], + "metadata": { + "id": "lLGpv1fLLIjF" + } + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "sfKEfaYC3Go7" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()\n" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "os.environ['GROQ_API_KEY'] = groq_api_key\n", + "\n", + "print(\"API keys have been set.\")\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "-4c_AGBt3PlR", + "outputId": "275b050a-ce64-4b40-a5f9-4ccc12d92add" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "Please enter your GROQ API key: ··········\n", + "API keys have been set.\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Design\n", + "\n", + "Some libraries may use hooks [2] and callbacks [3] [4], or advanced web-based debugging tools [5] [6] [7]. Hooks and callbacks are conceptually similar in that they both allow users to execute custom code at specific points during the execution of a program. Both provide mechanisms to inject additional behavior in response to certain events or conditions, without modifying the core logic. 
PyTorch defines, registers, and executes hooks mainly in its base classes like nn.Module and Tensor, without polluting the functional and user-facing APIs.\n", + "\n", + "At this point, our objectives are:\n", + "\n", + "1. Maximize debugging capabilities via the simple logging module to keep the source code clean.\n", + "\n", + "2. Additionally, as we can’t always control the outputs of generators, we will provide customized logger and tracers(drop-in decorators) for them, for which we will explain in Tracing. This will not break the first objective.\n", + "\n", + "In the future, when we have more complex requirements from users, we will consider adding hooks/callbacks but we will do it in a way to keep the functional and user-facing APIs clean." + ], + "metadata": { + "id": "4NztjiLR_EQE" + } + }, + { + "cell_type": "code", + "source": [ + "import logging\n", + "\n", + "log = logging.getLogger(__name__)" + ], + "metadata": { + "id": "d2H1vYoC_F-g" + }, + "execution_count": 3, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.utils.logger import get_logger\n", + "\n", + "\n", + "root_logger = get_logger()" + ], + "metadata": { + "id": "e2GxAapG_TJH" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.utils.logger import printc\n", + "\n", + "printc(\"All logging examples are done. Feeling green!\", color=\"green\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Yk4oiBFE_asG", + "outputId": "470e30dc-1b31-40c1-9e48-30754ae54b45" + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[32m2024-11-28 13:39:41 - [:3:] - All logging examples are done. Feeling green!\u001b[0m\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "Set up all logs in one file\n", + "\n", + "Assume your source code is at src/task.py. 
You can log simply by:" + ], + "metadata": { + "id": "B8lmlT_9_nVP" + } + }, + { + "cell_type": "code", + "source": [ + "import logging\n", + "\n", + "log = logging.getLogger(__name__)\n", + "\n", + "class Task:\n", + " def __init__(self):\n", + " log.info(\"This is a user program child logger\")" + ], + "metadata": { + "id": "o_Ru1myM_c-J" + }, + "execution_count": 6, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import logging\n", + "from adalflow.utils.logger import get_logger\n", + "\n", + "root_logger = get_logger(level=\"DEBUG\", save_dir=\"./logs\") # log to ./logs/lib.log\n", + "\n", + "# run code from the library components such as generator\n", + "# ....\n", + "\n", + "root_logger.info(\"This is the log in the main file\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "o7YPjEZk_ehg", + "outputId": "ad0f58e9-6f5c-4d00-e737-2fa1ad5ebd85" + }, + "execution_count": 7, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "2024-11-28 13:39:46 - - INFO - [:9:] - This is the log in the main file\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "Separate library and application logs" + ], + "metadata": { + "id": "Db1_Ob3X_gpe" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.utils.logger import get_logger\n", + "\n", + "app_logger = get_logger(name=\"my_app\", level=\"DEBUG\", save_dir=\"./logs\") # log to ./logs/my_app.log\n", + "\n", + "class Task:\n", + " def __init__(self):\n", + " app_logger.info(\"This is a user program child logger\")" + ], + "metadata": { + "id": "rQWuFnUc_gNm" + }, + "execution_count": 8, + "outputs": [] + } + ] +} diff --git a/tutorials/adalflow_function_calls.py b/tutorials/adalflow_function_calls.py new file mode 100644 index 00000000..184e2b88 --- /dev/null +++ b/tutorials/adalflow_function_calls.py @@ -0,0 +1,108 @@ +""" +This script demonstrates the usage of AdalFlow's Tool Helper functionality. +It can be run independently to showcase function calling capabilities. +""" + +from adalflow.components import Generator +from adalflow.components.model_client import OpenAIClient +from adalflow.utils import setup_env +from typing import List, Dict +import json + + +def setup_generator(): + """Initialize and configure the Generator with OpenAI client.""" + setup_env() + generator = Generator( + model_client=OpenAIClient(), + model_kwargs={"model": "gpt-3.5-turbo", "temperature": 0, "max_tokens": 1000}, + ) + return generator + + +def define_tools() -> List[Dict]: + """Define the available tools/functions that can be called.""" + return [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get the weather in a location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA", + }, + "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, + }, + "required": ["location", "unit"], + }, + }, + } + ] + + +def get_weather(location: str, unit: str) -> str: + """Mock function to simulate weather data retrieval.""" + # This is a mock implementation + weather_data = { + "San Francisco, CA": {"celsius": 20, "fahrenheit": 68}, + "New York, NY": {"celsius": 22, "fahrenheit": 72}, + } + + if location in weather_data: + temp = weather_data[location][unit] + return f"The temperature in {location} is {temp}°{'C' if unit == 'celsius' else 'F'}" + return f"Weather data not available for {location}" + + +def process_function_calls(generator: Generator, query: str): + """Process user query and handle any function calls.""" + # Get the response from the model + response = generator.generate(prompt_kwargs={"query": query}, tools=define_tools()) + + # Check if the response includes a function call + if hasattr(response, "tool_calls") and response.tool_calls: + for tool_call in response.tool_calls: + if tool_call.function.name == "get_weather": + # Parse the function arguments + args = json.loads(tool_call.function.arguments) + + # Call the function with the provided arguments + weather_result = get_weather(args["location"], args["unit"]) + + # Generate final response incorporating the function result + final_response = generator.generate( + prompt_kwargs={"query": query}, + tools=define_tools(), + tool_results=[ + {"tool_call_id": tool_call.id, "output": weather_result} + ], + ) + return final_response + + return response + + +def main(): + """Main function to demonstrate tool helper functionality.""" + # Initialize generator + generator = setup_generator() + + # Example queries + queries = [ + "What's the weather like in San Francisco?", + "Tell me the temperature in New York in Celsius", + ] + + # Process each query + for query in queries: + print(f"\nQuery: {query}") + response = process_function_calls(generator, query) + print(f"Response: {response}") + + +if __name__ == "__main__": + main() diff --git a/tutorials/adalflow_logger.py b/tutorials/adalflow_logger.py new file mode 100644 index 00000000..e4c4bb7e --- /dev/null +++ b/tutorials/adalflow_logger.py @@ -0,0 +1,143 @@ +""" +This script demonstrates the usage of AdalFlow's Logger functionality. +It can be run independently to showcase logging capabilities. +""" + +from adalflow.components import Generator +from adalflow.components.model_client import OpenAIClient +from adalflow.utils import setup_env +from adalflow.utils.logger import get_logger +import logging +from typing import Dict, Any +import json + + +def setup_logging(log_file: str = "adalflow.log") -> logging.Logger: + """ + Initialize and configure the logger. + + Args: + log_file: Name of the log file + + Returns: + Configured logger instance + """ + logger = get_logger(__name__) + + # Add file handler if not already present + if not any(isinstance(handler, logging.FileHandler) for handler in logger.handlers): + file_handler = logging.FileHandler(log_file) + file_handler.setFormatter( + logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + ) + logger.addHandler(file_handler) + + return logger + + +def setup_generator() -> Generator: + """ + Initialize and configure the Generator with OpenAI client. 
+ + Returns: + Configured Generator instance + """ + setup_env() + return Generator( + model_client=OpenAIClient(), + model_kwargs={"model": "gpt-3.5-turbo", "temperature": 0, "max_tokens": 1000}, + ) + + +def process_query( + generator: Generator, query: str, logger: logging.Logger +) -> Dict[str, Any]: + """ + Process a query using the generator and log the interaction. + + Args: + generator: The configured Generator instance + query: User query to process + logger: Logger instance for recording the interaction + + Returns: + Dictionary containing the query and response + """ + logger.info(f"Processing query: {query}") + + try: + # Generate response + response = generator.generate(prompt_kwargs={"query": query}) + + # Log successful response + logger.info(f"Generated response: {response}") + + return {"query": query, "response": str(response), "status": "success"} + + except Exception as e: + # Log error if generation fails + logger.error(f"Error processing query: {str(e)}") + return {"query": query, "response": None, "status": "error", "error": str(e)} + + +def analyze_logs(log_file: str, logger: logging.Logger) -> Dict[str, int]: + """ + Analyze the log file to gather statistics. + + Args: + log_file: Path to the log file + logger: Logger instance for recording the analysis + + Returns: + Dictionary containing log statistics + """ + stats = {"total_queries": 0, "successful_queries": 0, "failed_queries": 0} + + try: + with open(log_file, "r") as f: + for line in f: + if "Processing query:" in line: + stats["total_queries"] += 1 + if "Generated response:" in line: + stats["successful_queries"] += 1 + if "Error processing query:" in line: + stats["failed_queries"] += 1 + + logger.info(f"Log analysis complete: {json.dumps(stats, indent=2)}") + return stats + + except Exception as e: + logger.error(f"Error analyzing logs: {str(e)}") + return stats + + +def main(): + """Main function to demonstrate logger functionality.""" + # Setup + log_file = "adalflow.log" + logger = setup_logging(log_file) + generator = setup_generator() + + # Example queries + queries = [ + "What is artificial intelligence?", + "Explain the concept of machine learning.", + "Tell me about neural networks.", + ] + + # Process queries + results = [] + for query in queries: + result = process_query(generator, query, logger) + results.append(result) + print(f"\nQuery: {query}") + print(f"Response: {result['response']}") + + # Analyze logs + stats = analyze_logs(log_file, logger) + print("\nLog Analysis:") + print(json.dumps(stats, indent=2)) + + +if __name__ == "__main__": + main() From 5b699b5a460fd7b78d9297464f92cf1e9c68a08e Mon Sep 17 00:00:00 2001 From: lu-ny Date: Thu, 28 Nov 2024 13:59:48 -0500 Subject: [PATCH 21/40] Added a call method to the unfinished adalflow.components.memory.memory.Memory class. This method returns a formatted conversation history, handles empty conversations, and formats user and assistant messages.Additionally, added add_dialog_turn() method to handle new back-and-forth messages. This adds an id to each dialog turn for tracking and appends it to the current_conversation list. Finally, added 3 new tests for memory -empty memory, single dialog turn, and multiple dialog turn tests. Also updated the chatbot usage notebook to replace the deprecated class with the updated one. 
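For reference, a minimal usage sketch of the new Memory API described above, mirroring the added tests (illustrative only, not part of the diff):

```python
from adalflow.components.memory.memory import Memory

memory = Memory()  # creates its own LocalDB for turns when none is passed
memory.add_dialog_turn("Hello", "Hi!")
memory.add_dialog_turn("How are you?", "I'm good!")

# Calling the component returns the formatted history:
# User: Hello
# Assistant: Hi!
# User: How are you?
# Assistant: I'm good!
print(memory())
```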
--- adalflow/adalflow/components/memory/memory.py | 66 ++++++++++- adalflow/tests/test_memory.py | 23 ++++ use_cases/question_answering/chatbot.ipynb | 103 ++---------------- 3 files changed, 94 insertions(+), 98 deletions(-) create mode 100644 adalflow/tests/test_memory.py diff --git a/adalflow/adalflow/components/memory/memory.py b/adalflow/adalflow/components/memory/memory.py index a25ab2b5..f0819701 100644 --- a/adalflow/adalflow/components/memory/memory.py +++ b/adalflow/adalflow/components/memory/memory.py @@ -1,23 +1,77 @@ -"""Memory for user-assistant conversations. [Not completed] +"""Memory component for user-assistant conversations. Memory can include data modeling, in-memory data storage, local file data storage, cloud data persistence, data pipeline, data retriever. It is itself an LLM application and different use cases can do it differently. +This component handles the storage and retrieval of conversation history between users +and assistants. It provides local memory experience with the ability to format and +return conversation history. -This implementation covers the minimal and local memory experience for the user-assistant conversation. +Attributes: + current_conversation (Conversation): Stores the current active conversation. + turn_db (LocalDB): Database for storing all conversation turns. + conver_db (LocalDB): Database for storing complete conversations. """ +from uuid import uuid4 +from adalflow.core.component import Component +from adalflow.core.db import LocalDB from adalflow.core.types import ( Conversation, + DialogTurn, + UserQuery, + AssistantResponse, ) -from adalflow.core.db import LocalDB -from adalflow.core.component import Component - class Memory(Component): def __init__(self, turn_db: LocalDB = None): + """Initialize the Memory component. + + Args: + turn_db (LocalDB, optional): Database for storing conversation turns. + Defaults to None, in which case a new LocalDB is created. + """ super().__init__() - self.current_convesation = Conversation() + self.current_conversation = Conversation() self.turn_db = turn_db or LocalDB() # all turns self.conver_db = LocalDB() # a list of conversations + + def call(self) -> str: + """Returns the current conversation history as a formatted string. + + Returns: + str: Formatted conversation history with alternating user and assistant messages. + Returns empty string if no conversation history exists. + """ + if not self.current_conversation.dialog_turns: + return "" + + formatted_history = [] + for turn in self.current_conversation.dialog_turns.values(): + formatted_history.extend( + [ + f"User: {turn.user_query.query_str}", + f"Assistant: {turn.assistant_response.response_str}", + ] + ) + return "\n".join(formatted_history) + + def add_dialog_turn(self, user_query: str, assistant_response: str): + """Add a new dialog turn to the current conversation. + + Args: + user_query (str): The user's input message. + assistant_response (str): The assistant's response message. 
+ """ + dialog_turn = DialogTurn( + id=str(uuid4()), + user_query=UserQuery(query_str=user_query), + assistant_response=AssistantResponse(response_str=assistant_response), + ) + + self.current_conversation.append_dialog_turn(dialog_turn) + + self.turn_db.add( + {"user_query": user_query, "assistant_response": assistant_response} + ) diff --git a/adalflow/tests/test_memory.py b/adalflow/tests/test_memory.py new file mode 100644 index 00000000..16b3024e --- /dev/null +++ b/adalflow/tests/test_memory.py @@ -0,0 +1,23 @@ +from adalflow.components.memory.memory import Memory + + +def test_empty_memory(): + memory = Memory() + assert memory() == "" + + +def test_add_dialog_turn(): + memory = Memory() + memory.add_dialog_turn("Hello", "Hi! How can I help you?") + expected = "User: Hello\nAssistant: Hi! How can I help you?" + assert memory() == expected + + +def test_multiple_turns(): + memory = Memory() + memory.add_dialog_turn("Hello", "Hi!") + memory.add_dialog_turn("How are you?", "I'm good!") + expected = ( + "User: Hello\n" "Assistant: Hi!\n" "User: How are you?\n" "Assistant: I'm good!" + ) + assert memory() == expected diff --git a/use_cases/question_answering/chatbot.ipynb b/use_cases/question_answering/chatbot.ipynb index f326cd1e..fc08647c 100644 --- a/use_cases/question_answering/chatbot.ipynb +++ b/use_cases/question_answering/chatbot.ipynb @@ -16,19 +16,18 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "# Import needed modules from LightRAG\n", - "from adalflow.core.component import Component\n", + "# Import needed modules from LightRAGfrom adalflow.core.component import Component\n", "from adalflow.core.generator import Generator\n", - "from adalflow.core.memory import Memory" + "from adalflow.components.memory.memory import Memory" ] }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -39,78 +38,9 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ChatBot(\n", - " (generator): Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, model_type=ModelType.LLM\n", - " (system_prompt): Prompt(\n", - " template: {# task desc #}\n", - " {% if task_desc_str %}\n", - " {{task_desc_str}}\n", - " {% else %}\n", - " Answer user query.\n", - " {% endif %}\n", - " {# output format #}\n", - " {% if output_format_str %}\n", - " \n", - " {{output_format_str}}\n", - " \n", - " {% endif %}\n", - " {# tools #}\n", - " {% if tools_str %}\n", - " \n", - " {{tools_str}}\n", - " \n", - " {% endif %}\n", - " {# example #}\n", - " {% if examples_str %}\n", - " \n", - " {{examples_str}}\n", - " \n", - " {% endif %}\n", - " {# chat history #}\n", - " {% if chat_history_str %}\n", - " \n", - " {{chat_history_str}}\n", - " \n", - " {% endif %}\n", - " {#contex#}\n", - " {% if context_str %}\n", - " \n", - " {{context_str}}\n", - " \n", - " {% endif %}\n", - " {# steps #}\n", - " {% if steps_str %}\n", - " \n", - " {{steps_str}}\n", - " \n", - " {% endif %}\n", - " {% if input_str %}\n", - " \n", - " {{input_str}}\n", - " \n", - " {% endif %}\n", - " {% if output_str %}\n", - " \n", - " {{output_str}}\n", - " \n", - " {% endif %}\n", - " , prompt_variables: ['context_str', 'task_desc_str', 'tools_str', 'chat_history_str', 'input_str', 'output_str', 'output_format_str', 'steps_str', 'examples_str']\n", - " )\n", - " (model_client): 
OpenAIClient()\n", - " )\n", - " (chat_history): Memory()\n", - ")\n" - ] - } - ], + "outputs": [], "source": [ "# Build the ChatBot pipeline\n", "class ChatBot(Component):\n", @@ -149,20 +79,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Welcome to the ChatBot. Type anything to chat. Type 'exit' to end.\n", - "ChatBot: GeneratorOutput(data=\"Learning to drive can be an exciting and rewarding experience. Here are some general steps to help you get started on your journey to becoming a safe and confident driver:\\n\\n1. Get a learner's permit: In most places, you will need to obtain a learner's permit before you can start learning how to drive. Check with your local department of motor vehicles for the specific requirements in your area.\\n\\n2. Take a driver's education course: Consider enrolling in a driver's education course to learn the rules of the road and get some hands-on practice with a qualified instructor.\\n\\n3. Practice with a licensed driver: Before you can get your driver's license, you will need to log a certain number of supervised driving hours with a licensed adult. This is a great opportunity to get comfortable behind the wheel and practice your skills.\\n\\n4. Study the driver's manual: Make sure to familiarize yourself with the driver's manual for your state or country. It contains important information about traffic laws, road signs, and safe driving practices.\\n\\n5. Practice, practice, practice: The more time you spend behind the wheel, the more confident you will become as a driver. Practice in a variety of different conditions and situations to hone your skills.\\n\\n6. Take a driving test: Once you feel ready, schedule a driving test with your local department of motor vehicles. If you pass the test, you will receive your driver's license and be able to drive independently.\\n\\nRemember, learning to drive takes time and practice, so be patient with yourself and don't be afraid to ask for help if you need it. Good luck on your journey to becoming a licensed driver!\", error=None, raw_response=\"Learning to drive can be an exciting and rewarding experience. Here are some general steps to help you get started on your journey to becoming a safe and confident driver:\\n\\n1. Get a learner's permit: In most places, you will need to obtain a learner's permit before you can start learning how to drive. Check with your local department of motor vehicles for the specific requirements in your area.\\n\\n2. Take a driver's education course: Consider enrolling in a driver's education course to learn the rules of the road and get some hands-on practice with a qualified instructor.\\n\\n3. Practice with a licensed driver: Before you can get your driver's license, you will need to log a certain number of supervised driving hours with a licensed adult. This is a great opportunity to get comfortable behind the wheel and practice your skills.\\n\\n4. Study the driver's manual: Make sure to familiarize yourself with the driver's manual for your state or country. It contains important information about traffic laws, road signs, and safe driving practices.\\n\\n5. Practice, practice, practice: The more time you spend behind the wheel, the more confident you will become as a driver. Practice in a variety of different conditions and situations to hone your skills.\\n\\n6. 
Take a driving test: Once you feel ready, schedule a driving test with your local department of motor vehicles. If you pass the test, you will receive your driver's license and be able to drive independently.\\n\\nRemember, learning to drive takes time and practice, so be patient with yourself and don't be afraid to ask for help if you need it. Good luck on your journey to becoming a licensed driver!\")\n", - "ChatBot: GeneratorOutput(data=\"To get a driver's license in California, you can follow these general steps:\\n\\n1. Obtain a learner's permit: Applicants must be at least 15 and a half years old to apply for a learner's permit in California. You will need to pass a written knowledge test and a vision test to obtain your permit.\\n\\n2. Complete driver's education: If you are under 17 and a half years old, you must complete a driver's education course before applying for a provisional permit.\\n\\n3. Practice driving: With your learner's permit, you can start practicing driving with a licensed adult who is at least 25 years old.\\n\\n4. Apply for a provisional license: After holding your learner's permit for at least 6 months and completing at least 50 hours of practice (including 10 hours at night), you can apply for a provisional license.\\n\\n5. Pass the driving test: Schedule and pass a driving test at a local DMV office. Make sure to bring all required documents and fees.\\n\\n6. Receive your driver's license: If you pass the driving test, you will receive your provisional driver's license. With this license, you will have certain restrictions, such as driving with no passengers under 20 years old for the first year.\\n\\nRemember to check with the California Department of Motor Vehicles (DMV) for the most up-to-date and specific requirements for obtaining a driver's license in the state. Good luck with your journey to becoming a licensed driver in California!\", error=None, raw_response=\"To get a driver's license in California, you can follow these general steps:\\n\\n1. Obtain a learner's permit: Applicants must be at least 15 and a half years old to apply for a learner's permit in California. You will need to pass a written knowledge test and a vision test to obtain your permit.\\n\\n2. Complete driver's education: If you are under 17 and a half years old, you must complete a driver's education course before applying for a provisional permit.\\n\\n3. Practice driving: With your learner's permit, you can start practicing driving with a licensed adult who is at least 25 years old.\\n\\n4. Apply for a provisional license: After holding your learner's permit for at least 6 months and completing at least 50 hours of practice (including 10 hours at night), you can apply for a provisional license.\\n\\n5. Pass the driving test: Schedule and pass a driving test at a local DMV office. Make sure to bring all required documents and fees.\\n\\n6. Receive your driver's license: If you pass the driving test, you will receive your provisional driver's license. With this license, you will have certain restrictions, such as driving with no passengers under 20 years old for the first year.\\n\\nRemember to check with the California Department of Motor Vehicles (DMV) for the most up-to-date and specific requirements for obtaining a driver's license in the state. 
Good luck with your journey to becoming a licensed driver in California!\")\n", - "Goodbye!\n" - ] - } - ], + "outputs": [], "source": [ "chatbot.call()" ] @@ -170,9 +89,9 @@ ], "metadata": { "kernelspec": { - "display_name": "lightrag-project", + "display_name": ".venv", "language": "python", - "name": "light-rag-project" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -184,7 +103,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.11.9" } }, "nbformat": 4, From 3072e0260574cc5b055e7884737b006967455734 Mon Sep 17 00:00:00 2001 From: fm1320 Date: Fri, 29 Nov 2024 14:22:45 +0000 Subject: [PATCH 22/40] trace tutorials lint --- docs/source/tutorials/logging_tracing.rst | 14 ++ notebooks/tutorials/adalflow_tracing.ipynb | 183 +++++++++++++++++++++ tutorials/adalflow_tracing.py | 88 ++++++++++ 3 files changed, 285 insertions(+) create mode 100644 notebooks/tutorials/adalflow_tracing.ipynb create mode 100644 tutorials/adalflow_tracing.py diff --git a/docs/source/tutorials/logging_tracing.rst b/docs/source/tutorials/logging_tracing.rst index 26d8f605..7cf5ba5f 100644 --- a/docs/source/tutorials/logging_tracing.rst +++ b/docs/source/tutorials/logging_tracing.rst @@ -1,3 +1,17 @@ +```rst +.. raw:: html + + +``` + .. _logging_tracing: Tracing diff --git a/notebooks/tutorials/adalflow_tracing.ipynb b/notebooks/tutorials/adalflow_tracing.ipynb new file mode 100644 index 00000000..014c1b5e --- /dev/null +++ b/notebooks/tutorials/adalflow_tracing.ipynb @@ -0,0 +1,183 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Tracing\n", + "\n", + "In particular, we provide two tracing methods to help you develop and improve the Generator:\n", + "\n", + "1. Trace the history change(states) on prompt during your development process. 
Developers typically go through a long process of prompt optimization, and it is frustrating to lose track of the prompt changes when your current change actually makes the performance much worse.\n"
+ ],
+ "metadata": {
+ "id": "lLGpv1fLLIjF"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "id": "sfKEfaYC3Go7"
+ },
+ "outputs": [],
+ "source": [
+ "from IPython.display import clear_output\n",
+ "\n",
+ "!pip install -U adalflow[openai,groq,faiss-cpu]\n",
+ "\n",
+ "clear_output()\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import os\n",
+ "from getpass import getpass\n",
+ "\n",
+ "# Prompt user to enter their API keys securely\n",
+ "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n",
+ "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n",
+ "\n",
+ "# Set environment variables\n",
+ "os.environ['OPENAI_API_KEY'] = openai_api_key\n",
+ "os.environ['GROQ_API_KEY'] = groq_api_key\n",
+ "\n",
+ "print(\"API keys have been set.\")\n"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "-4c_AGBt3PlR",
+ "outputId": "85aba038-ee9c-463d-bdbd-027cbfff0094"
+ },
+ "execution_count": 2,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Please enter your OpenAI API key: ··········\n",
+ "Please enter your GROQ API key: ··········\n",
+ "API keys have been set.\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "We created a GeneratorStateLogger to handle the logging and saving into JSON files. To further simplify developers' process, we provide a class decorator trace_generator_states, where a single line of code can be added to any of your task components. It will automatically track any attributes of type Generator."
+ ], + "metadata": { + "id": "yWi2uEiE6UIf" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.tracing import trace_generator_states\n", + "from adalflow.core import Component, Generator\n", + "import adalflow as adal\n", + "from adalflow.components.model_client import OpenAIClient\n", + "\n", + "template_doc = r\"\"\" You are a doctor User: {{input_str}}\"\"\"\n", + "\n", + "@trace_generator_states()\n", + "class DocQA(adal.Component):\n", + " def __init__(self):\n", + " super(DocQA, self).__init__()\n", + " self.generator = Generator(\n", + " template=template_doc,\n", + " model_client=OpenAIClient(),\n", + " model_kwargs={\"model\": \"gpt-4o-mini\"},\n", + " )\n", + "\n", + " def call(self, query: str) -> str:\n", + " return self.doc(prompt_kwargs={\"input_str\": query}).data\n" + ], + "metadata": { + "id": "qk9pkcCVzdek" + }, + "execution_count": 13, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "Here is the folder structer of where the trace is generated as a .json file and also an example output below" + ], + "metadata": { + "id": "LAZUSnYn-lnI" + } + }, + { + "cell_type": "markdown", + "source": [ + "![image.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAj4AAADGCAYAAADSbIrxAAAMTGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnltSIQQIREBK6E0QkRJASggtgPQiiEpIAoQSY0JQsaOLCq5dRLCiqyAuuroCstiwK4ti74sFBWVdLNiVNyGALvvK9+b75s5//znzzzln5pYBgN7Ol0pzUE0AciV5sphgf9aEpGQWqROoAV3ABAZgFF8gl3KiosIBLIPt38vb6wBRtlcclFr/7P+vRUsokgsAQKIgThPKBbkQ/woA3iSQyvIAIEohbz49T6rEayHWkUEHIa5S4gwVblLiNBW+1G8TF8OF+DEAZHU+X5YBgEYP5Fn5ggyoQ4fRAieJUCyB2A9in9zcqUKI50NsA23gnHSlPjvtO52Mv2mmDWny+RlDWBVLfyEHiOXSHP7M/zMd/7vk5igG57CGVT1TFhKjjBnm7XH21DAlVof4vSQtIhJibQBQXCzst1diZqYiJF5lj9oI5FyYM7jOAB0nz4nlDfAxQn5AGMSGEKdLciLCB2wK08VBShuYP7RMnMeLg1gP4iqRPDB2wOaYbGrM4LzX02VczgDfyZf1+6DU/6rIjueo9DHtTBFvQB9zLMiMS4SYCnFAvjghAmINiCPk2bFhAzYpBZnciEEbmSJGGYsFxDKRJNhfpY+VpsuCYgbsd+fKB2PHjmWKeRED+HJeZlyIKlfYYwG/338YC9YjknDiB3VE8gnhg7EIRQGBqthxskgSH6vicT1pnn+MaixuJ82JGrDH/UU5wUreDOI4eX7s4Nj8PLg5Vfp4kTQvKk7lJ16exQ+NUvmD7wPhgAsCAAsoYE0DU0EWELd213fDO1VPEOADGcgAIuAwwAyOSOzvkcBrLCgAf0IkAvKhcf79vSKQD/kvw1glJx7iVFcHkD7Qp1TJBk8gzgVhIAfeK/qVJEMeJIDHkBH/wyM+rAIYQw6syv5/zw+y3xgOZMIHGMXgjCz6oCUxkBhADCEGEW1xA9wH98LD4dUPVmecjXsMxvHNnvCE0EZ4SLhGaCfcmiIulA3zcjxoh/pBA/lJ+z4/uBXUdMX9cW+oDpVxJm4AHHAXOA8H94Uzu0KWO+C3MiusYdp/i+C7FRqwozhRUMoIih/FZvhIDTsN1yEVZa6/z4/K17ShfHOHeobPz/0u+0LYhg23xJZgB7Az2HHsHNaE1QMWdhRrwFqww0o8tOMe9++4wdli+v3JhjrD98y3lVVmUu5U49Tl9FnVlyeakad8GLlTpTNl4ozMPBYHfjFELJ5E4DiK5ezk7AKA8vujer29ju7/riDMlm/cwj8A8D7a19f32zcu9CgAv7jDV8Khb5wNG35a1AA4e0igkOWrOFx5IcA3Bx0+ffrAGJgDGxiPM3ADXsAPBIJQEAniQBKYDL3PhPtcBqaD2WABKAIlYCVYB8rBFrAdVIGfwX5QD5rAcXAaXACXwDVwB+6eDvAc9IC34BOCICSEhjAQfcQEsUTsEWeEjfgggUg4EoMkIalIBiJBFMhsZCFSgqxGypFtSDXyC3IIOY6cQ9qQW8gDpAt5hXxEMVQd1UGNUCt0NMpGOWgYGodOQjPQaWgBughdjpahlegetA49jl5Ar6Ht6HO0FwOYGsbETDEHjI1xsUgsGUvHZNhcrBgrxSqxWqwRrvMVrB3rxj7gRJyBs3AHuIND8HhcgE/D5+LL8HK8Cq/DT+JX8Ad4D/6VQCMYEuwJngQeYQIhgzCdUEQoJewkHCScgs9SB+EtkUhkEq2J7vBZTCJmEWcRlxE3EfcSjxHbiI+IvSQSSZ9kT/ImRZL4pDxSEWkDaQ/pKOkyqYP0nqxGNiE7k4PIyWQJuZBcSt5NPkK+TH5K/kTRpFhSPCmRFCFlJmUFZQelkXKR0kH5RNWiWlO9qXHULOoCahm1lnqKepf6Wk1NzUzNQy1aTaw2X61MbZ/aWbUHah/UtdXt1LnqKeoK9eXqu9SPqd9Sf02j0axofrRkWh5tOa2adoJ2n/Zeg6HhqMHTEGrM06jQqNO4rPGCTqFb0jn0yfQCein9AP0ivVuTommlydXka87VrNA8pHlDs1eLoTVGK1IrV2uZ1m6tc1qd2iRtK+1AbaH2Iu3t2ie0HzEwhjmDyxAwFjJ2ME4xOnSIOtY6PJ0snRKdn3VadXp0tXVddBN0Z+hW6B7WbWdiTCsmj5nDXMHcz7zO/DjCaARnhGjE0hG1Iy6PeKc3Us9PT6RXrLdX75reR32WfqB+tv4q/Xr9ewa4gZ1BtMF0g80Gpwy6R+qM9BopGFk8cv/I24aooZ1hjOEsw+2GLYa9RsZGwUZSow1GJ4y6jZnGfsZZxmuNjxh3m
TBMfEzEJmtNjpo8Y+myOKwcVhnrJKvH1NA0xFRhus201fSTmbVZvFmh2V6ze+ZUc7Z5uvla82bzHgsTi/EWsy1qLG5bUizZlpmW6y3PWL6zsrZKtFpsVW/Vaa1nzbMusK6xvmtDs/G1mWZTaXPVlmjLts223WR7yQ61c7XLtKuwu2iP2rvZi+032beNIozyGCUZVTnqhoO6A8ch36HG4YEj0zHcsdCx3vHFaIvRyaNXjT4z+quTq1OO0w6nO2O0x4SOKRzTOOaVs52zwLnC+epY2tigsfPGNox96WLvInLZ7HLTleE63nWxa7PrFzd3N5lbrVuXu4V7qvtG9xtsHXYUexn7rAfBw99jnkeTxwdPN888z/2ef3k5eGV77fbqHGc9TjRux7hH3mbefO9t3u0+LJ9Un60+7b6mvnzfSt+HfuZ+Qr+dfk85tpwszh7OC38nf5n/Qf93XE/uHO6xACwgOKA4oDVQOzA+sDzwfpBZUEZQTVBPsGvwrOBjIYSQsJBVITd4RjwBr5rXE+oeOif0ZJh6WGxYedjDcLtwWXjjeHR86Pg14+9GWEZIIuojQSQvck3kvSjrqGlRv0UTo6OiK6KfxIyJmR1zJpYROyV2d+zbOP+4FXF34m3iFfHNCfSElITqhHeJAYmrE9snjJ4wZ8KFJIMkcVJDMik5IXlncu/EwInrJnakuKYUpVyfZD1pxqRzkw0m50w+PIU+hT/lQCohNTF1d+pnfiS/kt+bxkvbmNYj4ArWC54L/YRrhV0ib9Fq0dN07/TV6Z0Z3hlrMroyfTNLM7vFXHG5+GVWSNaWrHfZkdm7svtyEnP25pJzU3MPSbQl2ZKTU42nzpjaJrWXFknbp3lOWzetRxYm2ylH5JPkDXk68Ee/RWGj+EHxIN8nvyL//fSE6QdmaM2QzGiZaTdz6cynBUEFP83CZwlmNc82nb1g9oM5nDnb5iJz0+Y2zzOft2hex/zg+VULqAuyF/xe6FS4uvDNwsSFjYuMFs1f9OiH4B9qijSKZEU3Fnst3rIEXyJe0rp07NINS78WC4vPlziVlJZ8XiZYdv7HMT+W/di3PH156wq3FZtXEldKVl5f5buqarXW6oLVj9aMX1O3lrW2eO2bdVPWnSt1Kd2ynrpesb69LLysYYPFhpUbPpdnll+r8K/Yu9Fw49KN7zYJN13e7Le5dovRlpItH7eKt97cFrytrtKqsnQ7cXv+9ic7Enac+Yn9U/VOg50lO7/skuxqr4qpOlntXl2923D3ihq0RlHTtSdlz6WfA35uqHWo3baXubdkH9in2Pfsl9Rfru8P2998gH2g9lfLXzceZBwsrkPqZtb11GfWtzckNbQdCj3U3OjVePA3x992NZk2VRzWPbziCPXIoiN9RwuO9h6THus+nnH8UfOU5jsnJpy4ejL6ZOupsFNnTwedPnGGc+boWe+zTec8zx06zz5ff8HtQl2La8vB311/P9jq1lp30f1iwyWPS41t49qOXPa9fPxKwJXTV3lXL1yLuNZ2Pf76zRspN9pvCm923sq59fJ2/u1Pd+bfJdwtvqd5r/S+4f3KP2z/2Nvu1n74QcCDloexD+88Ejx6/lj++HPHoie0J6VPTZ5Wdzp3NnUFdV16NvFZx3Pp80/dRX9q/bnxhc2LX//y+6ulZ0JPx0vZy75Xy17rv971xuVNc29U7/23uW8/vSt+r/++6gP7w5mPiR+ffpr+mfS57Ivtl8avYV/v9uX29Un5Mn7/rwAGlEebdABe7QKAlgQAA54bqRNV58P+gqjOtP0I/CesOkP2FzcAauE/fXQ3/Lu5AcC+HQBYQX16CgBRNADiPAA6duxQHTzL9Z87lYUIzwZbI7+k5aaBf1NUZ9Lv/B7eAqWqCxje/gsy+IMtImMZLAAAAJZlWElmTU0AKgAAAAgABQESAAMAAAABAAEAAAEaAAUAAAABAAAASgEbAAUAAAABAAAAUgEoAAMAAAABAAIAAIdpAAQAAAABAAAAWgAAAAAAAACQAAAAAQAAAJAAAAABAAOShgAHAAAAEgAAAISgAgAEAAAAAQAAAj6gAwAEAAAAAQAAAMYAAAAAQVNDSUkAAABTY3JlZW5zaG90r8HhGAAAAAlwSFlzAAAWJQAAFiUBSVIk8AAAAttpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDYuMC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6ZXhpZj0iaHR0cDovL25zLmFkb2JlLmNvbS9leGlmLzEuMC8iCiAgICAgICAgICAgIHhtbG5zOnRpZmY9Imh0dHA6Ly9ucy5hZG9iZS5jb20vdGlmZi8xLjAvIj4KICAgICAgICAgPGV4aWY6VXNlckNvbW1lbnQ+U2NyZWVuc2hvdDwvZXhpZjpVc2VyQ29tbWVudD4KICAgICAgICAgPGV4aWY6UGl4ZWxYRGltZW5zaW9uPjU3NDwvZXhpZjpQaXhlbFhEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj4xOTg8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICAgICA8dGlmZjpSZXNvbHV0aW9uVW5pdD4yPC90aWZmOlJlc29sdXRpb25Vbml0PgogICAgICAgICA8dGlmZjpYUmVzb2x1dGlvbj4xNDQvMTwvdGlmZjpYUmVzb2x1dGlvbj4KICAgICAgICAgPHRpZmY6WVJlc29sdXRpb24+MTQ0LzE8L3RpZmY6WVJlc29sdXRpb24+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgrknrQzAAA14ElEQVR4Ae2dB5gURfqHC8EEKCpiDijqKYpZxIQohsOcFfGMp+cJio9i4Mz55MR0KuZMMGBWFEyYzqyYsxjAiIpgRvnPW3++tra3Z3ZC727Pzu97nt1OVdXVb/dM/6a+r6pa9ejRY6aTiYAIiIAIiIAIiEANEJitBq5RlygCIiACIiACIiACnoCEjx4EERABERABERCBmiEg4VMzt1oXKgIiIAIiIAIiIOGjZ0AEREAEREAERKBmCEj41Myt1oWKgAiIgAiIgAhI+OgZEAEREAEREAERqBkCEj41c6t1oSIgAiIgAiIgAhI+egZEQAREQAREQARqhoCET83cal2oCIiACIiACIiAhI+eAREQAREQAREQgZohIOFTM7daFyoCIiACIiACItCmlhF06NDB9enTx3Xu3NnN
M888JaF4+OGH3UMPPVRSHiUWAREQAREQARFoXgI1K3wQPQMGDHBt27Yt6w5suummPp/ET1n4lEkEREAEREAEmoVAzbq6aOkpV/TYnUL89O7d2za1FAEREAEREAERyDiBmhU+uLfSMImfNCiqDBEQAREQARFoGgI16+oqNaan0O1A/Jjrq1C6QsemTZvmJk6c6MaMGeOmTp1aKKmOiYAIiIAIiIAIlEmgZlt8yuTVaNkQYt26dfNxR8QfyURABERABERABNInIOGTPtOKSiTuiPgjmQiIgAiIgAiIQPoEatbVlT7K9EqsJP6oZ8+erl+/fr4yjz7yiBs5alR6FVNJIiACIiACIlDlBCR8MngDK4k/WmKJJdwCCyzgr6rLcstl8OpUJREQAREQARFoPgJydTUfe51ZBERABERABESgiQmoxaeJgTfW6ZZeemk322yzuY4dO0anaN++vVtmmWX89qeffup+++03v77kkku6Nm3auF9++cVNnjzZtWvXzm2wwQZu8cUXd4899ph7//33ozJYIe1qq63mVl55Zffdd9+5Dz74wL355pvu999/r5MuaWPOOed0a621lltqqaV8/d5++233xhtvuJ9++ikpeZ191GudddbxeadMmeJefvllN2nSpDppkjaIk6KuXbp08eekvpzz+++/T0qufSIgAiIgAjVEQMKnhdzsc845p96V8OIfMmSI33/VVVe5+++/36+fe+65fvnzzz+7xx9/3G2++eZR3k6dOkV5FllkEXfKKadErrMoUW5lxowZbsSIEe7uu+8Od9dZP/TQQ92GG27oxUd4YObMmV5gDRs2LK94Ovroo93aa6/tWrVqFWb14o1rffHFF+vsZwOBdsghh3gRhwiM24QJE9yFF14oARQHo20REAERqCECFQsfRi6udAwb4635r4xE0yznmmuuOqInPCtB0oiI1q1bh7ujdUTG3nvv7RAx99xzT7TfVvr37+8oI8kQMxtvvLHrutJK7pBcurgde+yxvpUovp/t2Wef3Q0ePNiLrttvv71OkjPPPDNq4apzYNYGrVbnDh3qDvrHP9wff/yRlET7REAEREAEWjiB1rlg2JMrucYPP/zQ/yo3l0q5ZTW16Mn6VBPwKMVef/1134qCmCHAGWNAxAsuuMCNHz/et5DQSoPttttufmn/cHlxH1977TX31FNPeXfSGWec4UUGab7++msvbq688kpHK9Fiiy3mcGFhq666qrvtttu8API7cv923HFHt+2229qmL/Paa6919957r2+Vodca4gdXFq65559/PkqLmOrVq5ffRpzceeedjvPSwrPssstGk8l27drVtzaZu41Wq8022ywqhzJHjhzp880///yRCxA+Cy64oHvuueeitFoRAREQARGoHQIVt/iAyibqLLflp6lFT0u8vcSwYAgRM0aARswUsi+++MIPmhimIa7GhA0jStN6Yy0kw4cPd/fdd5+7/PLLfRZcSoiQ8Dx9+/aNigtdbOy85JJL3HvvvecOPPBAn4bYoksvvdSvI2xCwXTCCSe4d955xx/76KOP3AsvvOAuvvhit9BCC/mWqP3339/hLsPWXXddv+QfIu7ss8+OthF+++23n9tqq638vlVWWSU6phUREAEREIHaIlA/EKLM60f8lNpKwakkesoEnkI2xMwxxxxTr6RPPvnEt5bQYnL++edHoscSfvvtt+7zzz+3TbdSzmVltuKKK0ZxOQQTW1yRHWc5duxY9+OPP/pdtMBY9/0ddtghSvbqq69GoifamVu54ooros1QwORzyVni0aNHu7feesv/0TomEwEREAERqE0CqbT4GLpSW34keoxc8ywRPj/88EO9kyNqcF+ZEc9DTzB6Zs0777x+dxg83CaIA1pzzTUtm2+hiTZiK7jg6EWGmQgKBRTCh0DruIWCy+pCGlx0JoRwu5544onujjvucK+88oovAhFGC5JMBERABESgtgmkKnxAWaz4kejJ/oPXo0cPPwo0vbuKtVC80FU+nxGzE++ZNffcc0fJ99xzT8dfITN3HGkeyY1SzVQfCDSMec/4IwaIetDKNG7cuLy9yHwm/RMBERABEWjxBFJzdYWkGnJ7SfSEtLK5TlzPkUce6ULRQw8uAqQJhmY9ycIJVr/66qukJHn30bJUioVd3anXEUcc4cWNBXFTFi4wxNABBxzgiE9ab731SjmF0oqACIiACLQwAqW9aUq4+HwtPxI9JUBspqS4jKxnFVWghxSigUEQzXAbhYHUtp8BDhdddFG/SRByKcYAixarM2bMGEdgdSGzHl1hGoKu+aO1aqONNnLEHJlLjLIRR/GA6zC/1kVABERABFo2gUYTPmCLix+Jnup4mHbeeeeoooiesIdUdCDPCuLI3F0Ww5Mnab3diCZrYaI31xNPPFEvTbE7nn76accfRvf+k086yXWYbz6/Tc+xpKBrf1D/REAEREAEWjSBRhU+kDPxE19v0VSr/OIY58aMHl5Jxtg4ScbYQWbdu3e31XpLemfNN0uI0C2d1p3PPvssEj477bRTXuGDS2yOOeaIgqKJDbrmmmv8OQiUprzQEGP/vegid/zxx/vdSUHTYXqti4AIiIAItFwCjRLjE8eF+AkFUPy4ttMjQPyNWbkveFpezDbdZBNbjZaHH354FETMztlzIsSMVj2bEwxBkhSgvEmuTBM9nMtcWrfeeqsV48tnIMS44bZi7B7cVauvvro/zLxfv/76q3eT0TU+aTyp6dOnR0Xli0+KEmhFBERABESgxRJo9BafFksuoxf28ccfRzUj1uawww7zA/q99NJLdWJ0okQJK6QlNgbDPXT11Vf7AQppaWG/jbtjWS2Ghm0Cixn/hxGYMcQLrq8nn3zSj/pM7A2Tlpo9+OCDturH7WGQQjuOaCI9E6cSz0MvLaadsN5c/fr18xOXUgATmFrg8sEHH+y2335798wzz3hRRR7r6k5aBkOUiYAIiIAI1CYBCZ8Wdt8RLYgPRAq9ngjw5Y/pIUaNGlXU1TKGD1N6WHAyQsdEhRVAq4n1qmJm+NCYuJTWGAt+RiyZkArT0Qp40003hbv8gInEFDEtBsZozvzFDfHCnF5mTD5KfZmYlXqRP6nFCDY2Savl1VIEREAERKB2CDSJq6t2cDb/lfJivygXz5LU46mU2g0aNMj35oq7hSiX8XCYOsLMenHZNsvTTjvN965ibq+4MZUGriqbqiI8TvqBAwe6m2++OXKZhceJ4WGwQuoXXiPXjRB64IEHHK6vuHEdDGY4YMCAOqNOx9NpWwREQAREoGUTaJVzJSQPyNKyr9sxCWeW7bjjjquoesxizhxa7du39wP40UJi822VUjBdwJdffnk/sSeBy2GX9mLLwTWFmwrxMWHCBN8iVWxe4oRwU1F3hIvFDzWUnxYvWpyYEmPSpEmOIO1yrr+h8+i4CIiACIhAdRGQq6u67lfRtUUgIDIqNVpVmOOqEiPgOpyBvZSyaL0pZyZ1WoDiI0OXcl6lFQEREAERaJkEatbVZT2Jsnhbs1y3LPJSnURABERABESgWAI1K3zC8Wa
KhdVU6bJct6ZioPOIgAiIgAiIQGMQqFnhw5QINit4Y4Att0zqRN1kIiACIiACIiAC6RNonRvO/+T0i81+icSdECzLGDRt27aNxoZprprj3mKahhEjRjh6PclEQAREQAREQATSJ1CzvbrSR6kSRUAEREAEREAEsk6gZl1dWb8xqp8IiIAIiIAIiED6BCR80meqEkVABERABERABDJKQMInozdG1RIBERABERABEUifgIRP+kxVogiIgAiIgAiIQEYJSPhk9MaoWiIgAiIgAiIgAukTkPBJn6lKFAEREAEREAERyCgBCZ+M3hhVSwREQAREQAREIH0CEj7pM1WJIiACIiACIiACGSUg4ZPRG6NqiYAIiIAIiIAIpE9Awid9pipRBERABERABEQgowQkfDJ6Y1QtERABERABERCB9AlI+KTPVCWKgAiIgAiIgAhklICET0ZvjKolAiIgAiIgAiKQPgEJn/SZqkQREAEREAEREIGMEpDwyeiNUbVEQAREQAREQATSJyDhkz5TlSgCIiACIiACIpBRAhI+Gb0xqpYIiIAIiIAIiED6BCR80meqEkVABERABERABDJKQMInozdG1RIBERABERABEUifgIRP+kxVogiIgAiIgAiIQEYJSPhk9MaoWiIgAiIgAiIgAukTkPBJn6lKFAEREAEREAERyCgBCZ+M3hhVSwREQAREQAREIH0CEj7pM1WJIiACIiACIiACGSUg4ZPRG6NqiYAIiIAIiIAIpE9Awid9pipRBERABERABEQgowQkfDJ6Y1QtERABERABERCB9AlI+KTPVCWKgAiIgAiIgAhklICET0ZvjKolAiIgAiIgAiKQPgEJn/SZqkQREAEREAEREIGMEpDwyeiNUbVEQAREQAREQATSJ9Am/SJVYloEOnTo4Pr06eM6d+7s5plnnpKKffjhh91DDz1UUh4lFgEREAEREIGWTkDCJ6N3GNEzYMAA17Zt27JquOmmm/p8Ej9l4VMmERABERCBFkpArq6M3lhaesoVPXZJiJ/evXvbppYiIAIiIAIiUPMEJHwy+gjg3krDJH7SoKgyREAEREAEWgoBuboyeidLjekpdBmIH3N9FUpX6Ni0adPcxIkT3ZgxY9zUqVMLJdUxERABERABEcgsAQmfzN6abFUMIdatWzfXpUsXd9FFF5UtftZaay3Xvn37ehdnwuqbb76pdyzrO+aaay7Xs2dPt/TSS7uOHTu6KVOmuE8++cSNHz/e/fTTT2VVf8MNN3StW7d23333nZswYUJZZSiTCIiACIhAfQISPvWZaE8BAsQdEX80atSoAqnyHxo0aJBr0yb/Y/fHH3+49957zw0fPty98cYb+QvKwJE555zTHX744W7NNdd0s81W32u83377uVdffdUNHTq0JAG03nrruYEDB/ornDlzptt3333djz/+mIErVhVEQAREoPoJ1P+2rv5r0hU0MoG04o+SqomAWGGFFdwpp5wSvfyT0jX3vvnnn99dcsklbu21104UPdSPa1lttdXcsGHDXKdOnYqu8g477BClbdWqldt5552jba2IgAiIgAhURiD/T+/KylXuFkwgrfij0aNHu6+//trxcl9yySXdcsst5xBVs88+u6eHu2ehhRZyxx13XOZoDhkyxM0777xRvT788EP37LPPunfffddfBy695Zdf3h9v166dI/0BBxzgaNEqZHPPPbdbZpll6iTp1auXu+GGG+rs04YIiIAIiEB5BCR8yuOmXCkQeOKJJ9ynn35apyTcYAidVVZZxe+n9Wfbbbd1d999d510zbnRd4893HzzzRdV4bacgBsZuP6IyUHUbbfddm6vvfbywo64JlxWV199dZQvaWWnnXby6cNjCCxiq95///1wt9ZFQAREQATKICBXVxnQlKXxCMyYMcO7uZ577rnoJP369cvrTiIRLiVcTrvssosjLe6lQnFEUcGzVojVWX/99d0eOUGz5557OlpraHnJZ1tvs0106IUXXqgjeqIDuZW77rrLIe7MNt98c1vNu9xkk02iY88//3y03rdv32hdKyIgAiIgAuUTUItP+eyUsxEJ0HPsmmuu8aKG3k30mnr00UfrnXHXXXd1tJKEQsdiZAiSPvXUUwsGFh966KEOl1o8OJmg4scee8zH5/z+++/ReWmBQihhpLniiiuiY0kr1157rS8fdx51pCXrtddeS0rqll12WceI3dj06dN97zkYkJd81LEhV1liwdopAiIgAiIQEWjxwoeRiysdw8Zoaf4rI9H4S3ox0Zqyzjrr+JPRWhIXPrjAdtttt7yVIWYIYXLSSScluon69+/vBVVSAYiNjTfe2HVdaSV3SC6dWTgS9meffea7rtuxpOX333/vu7YvtdRS/jBl5hM+tDiZPfXUU+6HH35wkydNcosvsYTv2r7FFlu4+++/35JoKQIiIAIiUAaBFu/qYq4qBEulJtFTKcHS8xMobLbgggvaql/S5XvvvfeO9r3zzju+2/ixxx7rbr/9dmetNLTOsC9uO+64oyNo2Ayhcdppp7mjjz7aPfLII741h2OdcsHVBx98sCWr0zvryy+/jPYXWvnqq6+iw/HrsAO05qy66qq26WOE2LgvN2Ck2VZbbWWrWoqACIiACJRJoMULH7hUKn4kesp8uirMRk8ps3DeMkSCjXPD8ccff9wHRD/99NO+ZWfEiBHuqKOOisQPgci0DoUWxsxcddVV7rzzznOvvPKK45x0U7/yyiuj5BtssEG0bq4odkzKtcYUY1988UWULAyKjnbmVjbbbDPfqsM+BJUN5Pjggw9G17Hooou6BRZYIMymdREQAREQgRIJ1ITwgUm54keip8QnKsXkH3/8cVSadXFnB+4m4n4wgqEvvvhivx7+Y+TksKUvFD4rrrhi1HMKV1SS+2js2LHRoIGMzGxd+OfOrZuFLTm2L2kZtgxZfFA83dZbbx3t4lk1I6bnzTfftE23++67R+taEQEREAERKJ1AzQgf0JQqfiR6Sn+g0syB4DAjkNjM4n7Yfvnll6MWETtuy+uuu85WI+HCDkZaNiOOKJ9dcMEF7vrrr/d/NnLyn7Vw0XhD+fLbfuKFIguuw/YxGOJiiy3mN7nOeNf9O+64w5L63mfRhlZEQAREQARKJtDig5vjROzXdEMBzxI9cXJNvx0O5Pfbb79FFUAomNGyk89++eUX3yJEbyr+aG1h30q5gGWzyZMn22q95Ysvvuj4Cy2cewvXUzEWpvsxYe6usBUHV1t4rZTPuEA///yzQwjyh3CL16uYeiiNCIiACIiAczUnfLjpDYkfiZ5sfDTo3m1G926zsCWooZniERHW1R0BMjE3w3wYp1Osu8rOzaShTEaKFTsNxSKLLGLZHa61uDGGkBkjWBNjFLc55pgj2sUUFhI+EQ6tiIAIiEBJBGrK1RWSyef2kugJKTXv+sorrxxVgK7jZhb4y3bSTO+WjmUoGCwf4sWMKTFKsTCgefHFFy8qayh8wusgM4MthoMlEsuEoIr/heMM0U0/X6xQURVSIhEQARGoYQI1K3y453HxI9GTnU
8C81wxTYPZnXfeaat+XBzb6Ny5s63WWzLVgwVBEztjrS3hNBnFihcrfEzQvZweVgRKF7IlcmPwhOKKoOnQGG3ajC74uLTy/dnghYggpsOQiYAIiIAIlE6gJl1dISZze7EvXA/TaL1pCdDqMWjQoOikxNXQ1dyMMXu23HJLv7nGGmv4ION4XAwHmRTUbMqUKbbq3V220b17d1utt2TwQ+t+vv/++7tp06a5zz//3Asom6D0wAMPdEceeWS9vLbjoIMOslU/gvRHH30UbXOdjARtxhxlhebjYjoOG5WaAR1vueUWy6qlCIiACIhAkQRqusXHGCF4JHqMRvMuGcSPiTzD8WqGDh1ap1JMJcGoxhgtOoMHD65znI2uXbu6Hj16RPvHjRsXrdOyZ0IJNxPzc8WNObNM9OAaQ/SYMe6PGSMyn3HGGfWmvOA4gyGGgdQ33nijZfNLWm3MhcX1FBI9ZKB3l/VuI8Cb1iSZCIiACIhAaQRqvsWnNFxKnSYBWmToJo54scH5wsBlznXPPff4Xk3x8w4bNixqFerWrZsfywfxijsL0cP8W9aN/PXXX3e33XZbVARj/4wcOTIa+ZlRnBEoTz75pHczIZiYqNSMQQRDY5TnPn36RG4uWm3oOo9woZfYwgsv7N107dq1i7IxJlHczcUUFGaU2ZAhjogxMsGDYBsyZEhD2XRcBERABEQgICDhE8DQatMSYOLNfEa8CxOVhrObh2mfeeYZ98ADD0QuL+JowtGYLS1ChIlK48ZYOauvvno0TQSxOknxOoipm266KZ7dz/91/PHHO0QXhmAjGDsMyLZMU3MtRowkHRqxRWGr1ujRo8PDedeJMcK9huHmk4mACIiACJRGQK6u0ngpdYUEzFUTL4b9jLHD9A60xhDPkk/0WF6mlTj99NPruKHsGIHADG6Iu8mCgu2YLZmb6/LLL/etPLbPlnSTx6V16aWX2q46S8pEUF122WXR9BJ1EgQbHXJTZuyzzz7BHlcnOJku9WEMUp2EsY1wCgu66YeDOcaSalMEREAERCCBQKtcs344GG1CEu1qDgLEjWTZCMTNkuEuw11FXA7iiYEAcWkVa3QPp/UGAcaAgaXk5RzMJca4Qx07dnSIJkTJEUccUWd0Z+p0wgkneIFXbL2UTgREQAREIF0CEj7p8kytNAmf1FA2W0H0/DrrrLPqdGdnAMW426vZKqgTi4AIiEANEpCrK6M3PexFlLUqZrluWWJFoHX//v0ds8ZjjM9zzjnnZKmKqosIiIAI1BwBBTdn9JbTMmCBs1mrInWTFU+A7vhMS0Hvsoam2Ci+VKUUAREQAREoh4BafMqh1gR56L1jM4I3wemKPgV1CkcvLjpjjSeku7pET40/BLp8ERCBTBBonRsT5ORM1ESVqEOAHk6MVkycCIGzzT03E+4tRkweMWKEXuB17pQ2REAEREAEqomAgpur6W6priIgAiIgAiIgAhURkKurInzKLAIiIAIiIAIiUE0EJHyq6W6priIgAiIgAiIgAhURkPCpCJ8yi4AIiIAIiIAIVBMBCZ9quluqqwiIgAiIgAiIQEUEJHwqwqfMIiACIiACIiAC1URAwqea7pbqKgIiIAIiIAIiUBEBCZ+K8CmzCIiACIiACIhANRGQ8Kmmu6W6ioAIiIAIiIAIVERAwqcifMosAiIgAiIgAiJQTQQkfKrpbqmuIiACIiACIiACFRGQ8KkInzKLgAiIgAiIgAhUEwEJn2q6W6qrCIiACIiACIhARQQkfCrCp8wiIAIiIAIiIALVREDCp5ruluoqAiIgAiIgAiJQEQEJn4rwKbMIiIAIiIAIiEA1EZDwqaa7pbqKgAiIgAiIgAhUREDCpyJ8yiwCIiACIiACIlBNBCR8quluqa4iIAIiIAIiIAIVEZDwqQifMouACIiACIiACFQTAQmfarpbqqsIiIAIiIAIiEBFBCR8KsKnzCIgAiIgAiIgAtVEQMKnmu6W6ioCIiACIiACIlARAQmfivApswiIgAiIgAiIQDURkPCppruluoqACIiACIiACFREQMKnInzKLAIiIAIiIAIiUE0EJHyq6W6priIgAiIgAiIgAhURkPCpCJ8yi4AIiIAIiIAIVBOBNtVUWdU12wQ6dOjg+vTp4zp37uzmmWeeiir78MMPu4ceeqiiMpRZBERABERABOIEJHziRLRdFgFEz4ABA1zbtm3Lyh/PtOmmm/pdEj9xMtoWAREQARGohIBcXZXQU96IAC09aYkeKxTx07t3b9vUUgREQAREQAQqJiDhUzFCFQAB3FuNYRI/jUFVZYqACIhA7RKQ8Knde5/qlVca02OVIbYnbhI/cSLaFgEREAERKJeAhE+55JSvUQgQ0yPx0yhoVagIiIAIiECOgISPHoPMEZD4ydwtUYVEQAREoMUQkPBpMbeyZV2IxE/Lup+6GhEQARHICgEJn6zcCdWjHgGJn3pItEMEREAERKBCAhI+FQJU9sYlIPHTuHxVugiIQOMSWGKJJdzAgQPdX//618Y9kUovmoAGMCwalRI2FwHED0bvrtBs246Hx7QuAiIgAlkgcNppp7n27du7DTfc0E2dOtX973//y0K1aroOEj41ffur5+JN3JjYsZrbth23/Vpmh0DPnj1dv379fIUefeQRN3LUqOxUrpFqwkjmQ4YM8aV/+umnjpdf2nbQQQe5tdZayxd78cUXu1deeSXtU5RUXlNcc0kVykjiNm3+fM0usMACGalVbVdDrq7avv9VdfWIm3xd3avqQmqssjT184XPX5fllquJq+cXvl3zsssu2yjXvFyOpZ1jkUUWaZRzlFJoU1xzKfXJStprrrnGff755+6FF15wDzzwQFaqVdP1+FOK1jQGXXy1ELCWHWvpqZZ6q54iIAK1SYAfa0k/2GqTRjauWsInG/dBtZhF4IwzzsgMi06dOrl1113XzTvvvO7ll192b731lvvjjz/cQgst5Nq1a+d+++03hxsjn/ErfPXVV3csP/zwQ/fSSy+577//PjE55+IX88yZM93EiRN9Gs679tprO1pM2DdhwgQfI5BYQLCTuq2zzjpuqaWWclOmTPF1nzRpUpDiz1VG3F5wwQX9Dn6V/vTTT26FFVbw1/3DDz+422677c/Es9bIg4uFaUomT57s3n33XX998YRLL720m2222VzHjh2jQ1zjMsss47dhB8O4wRduiy66qPviiy/ciy++6L788st4smi7nGuIMhe5wnV07drV0cqCS+ejjz7yzwPMQrNng7qb4eqwa+Z+5HsGFl54YX/faMXhfr/55pvuq6++smL8cvbZZ/fPAxtzzz13dIy8nGPGjBnuk08+ifaHK6U8F2G+htZLuWZ7zikThq1atfLPOGzffvtt99RTT9U7XTFc6mWatQMmlE0dP/vsM/faa68V/MxaOdxvnvHlunRxM37/3b366qvunXfe8Z9/S1PskuefzzLGNfMdkmR8nlZcccXouedzxZ8sfQKtevToMTP9YlVirRFobsFy3HHHpYYcocL12JeVFYwoGTFihNtqq63c/PPP73fvuuuud
jha8mI8+eSToxdUdCC38vXXX7sTTzyx3gvt6quvdjbtB7EbnJ+XRGic/5FcjMywYcPC3XXWjz76aP8i4YUSGgLjnHPO8SIi3P+vf/3LrbHGGn7XmDFj3CabbOLmmmuuKEl4fbnvCte/f/86xy3hd9995/773/9GcSZMWHvdddfZ4cTlVVdd5e6///7oGC982Cy++OLRPlvhhc51P/bYY7YrWpZyDVGmEla22247t8ceezhER9wQfhdccIH74IMP/KHrr7++jiCJpycOJx7vs88++zgm+W3dunU8uXv//ffd0KFDo+dlm222caQvZH379vUCKExT6nMR5m1ovZRrDp/zK664wh1wwAFeHHOOqbln6O8HHhidrhQuUaZZK8svv7w76qijos9peJwA43PPPde98cYb4e5ofccdd3S77babC2NzOMjnb/To0e6mm26K0hazQvwVwgv797//7V1eYb7VVlvNf67sOyU89uOPPzqYjR8/Ptyt9QoJKManQoDK3rII8Ivr/PPPryd6uErEBEG6SV9QRoEvy6E5gUErTZLRuoJA4Is5n11yySX1RA9pOT8uvp122ikx67HHHutbDOKih8S8tAcPHuz4Us9nvHxD0ROmQ4wdeeSReY/PN9987vjjj3dLLrmkz5ZUh7C8+DrnRZgliR7SwvXQQw91RxxxRDxrne1C11AnYZEbe+65p/vb3/6WKHooYrHFFnNnnnmmXxZTZMiFe/Kf//zHIWaSRA/ldcm1OJDGjof5850vnqbS5yLfeYrdH6+P5TswJ3JoWYlbOVzCMvjBcOqpp+b9nPLD5JRTTvGtmmE+1rfYYgvHPY+LHo5xHbvssosX52yXY3EWtOjyoy3fdwo/IAYMGOA222yzck6nPHkIyNWVB4x21yaBw3IvV3vJ8AuPX1rPPvusf6FvtNFGeQWN0Tr77LNdh5wIwH7++WfHr2FcFriP9ttvPy8cKJ+X+GGHHWbZ6iz50v3ll1/8L0POjbuNL0hrcUD4xF1Qe++9d9TDh6b0u+66yz3xxBPejcUxXtDY7rvv7u677z5ffp2TztrgmnHH0MROHTDybr755rNSON/kT8sT7rutt97arbfeev5FwZc6L1lahXCTnXTSSf7FtuWWWzpaizBcONYS9N577/l9vPx4uVuL1+8518Ktt97qr59zI9Zwm2Gca4MNNnBPPvmk3076l3QNSeka2kd9QqH48ccfu3HjxnmXCWOy0FLGveSPlxfXTYsV7jxaDf/xj3/4U/Ac8FxgoWtshx12cJ1z7g2Me0bw69ixYz1LWhVXWWUV/7LFRYXwpMXrwQcfjNyKh/zzn67TrJYE8tFNGnah+zCt58JXMs+/Uq45XgT1/Tzngvog9yyZ27gcLlYuAppnyYQLLSbDhw93r7/+un+G9s99Bu3zefjhh3uu06ZN89lpefn73/9uRbnnn3/e3Xnnnf7eUCfcx1i3bt38vcFtVqnxzJgYoh7jcvfxpZxbne8a/sydiUjkMwcvWeUEJHwqZ6gSWgiBNddcM3qRcEm8rHgZYc8995wXG7iwVl55Zb8v/o9fisTVYIgGvkRNPPCl/vTTT/tma16UxIDwRUvcTtxw6yCKvvnmG3+Ilzy/Ymky50tyzjnn9AKKFypGr6Ftt93Wr/PvhBNO8OKEdWIKuAZrbufc+++/f153GW6VZ555hqyRhe4u6nv66adHxy688EKHgEHUYRYvxLq5ElZddVU2veFmiL8wcHcgFDCunRcSsT0Y4orrR1BZ120EBS/5fLESSdfgCyvxn4k1siEmcJ3YOeFA66C5rcwtibDDvv32W7/kH9cUv2b2hwH6V155pRdV7Md46SIoeMlixKlgxGBZWdNz4tKcodxn2+8T5v6l+VxYmUnLUq45zA8XXvz2nNuxcrhYXp4lhCKGkPhnThzaZ5A4Nz6DsEbUIo569+7t7rjjDp+eZ8xECPsQTGYMTUBZVreDDz7Yt8TY8XKWuHYRyWa0QnEfMeIJb7zxRv99QT35ccCzQKyhrHIC9dsZKy9TJYhAVRLgV7bZpJxQMdFj+1gifOzlF+5nvVevXtEuvrTsC9d28uvz8ccft03fchFtBCvEH8RfBgS52i9TkhJka8avUTMLwrRtWxJPYUZLQpIhJuKih3T80hw5cqT/46URN+J07JcoX9AmAuLp8m2vv/760SG6+5roiXbmVhAzdg5+BXfv3j08HK3nu4YoQQkrc8wxR8HUvJxokWNJYK61yBXMFBy8+eabI67WWzE47Fu9bDufK8SOJy3Tei6Syk5jH6I5/pxTbiVcaFk1u+GGG+p9Bvns0ppoxo8PjGcw3kpkaWx5+eWXR5/9Up9xKyNcIvwKGSL39ttv988Xz1j8+6RQXh0rTKBN4cM6KgK1QyD8Mru/wHgbuFKSLHw5IUDC8ix92OMm7Pljx1mG7pBw//Tp06PYozDweqWVVoqS5TtvWGaYN8qYW4n3ILJjBOSGg+MRd4DriXgeWp+wkEkpAoAWqPBXL4IxyWhx4dro7YXxguPXe9zyXUM8XTHbCL59993XJ+WaeFHz0sSFaC8t3CrlGuWHRgsALYbEOSEgYWMWrtu+hpZpPRcNnafc4+baiuevhIsFESNw4uXYedhvPGlRxHChmtHqkvTZ5Tg/XnheuT98jvL10LOyCi3Jy2fann9aD3FR47a0chGB/MnSJSDhky5PlVbFBAh6NLMmZ9tuaBl22SYtAdINWTxPQ+nzHbc4AI7jbuOvkJlYKZQmfowveuJd6OGE8EnLQvGHmDBBkVQ+MTYmfMylmJQurX285IjpsfgmulXjmjnkkEO8KwsBhEskbIkr9dy4XOi9xHQG9jIutYx86Zviuch37kr3l8OFHx7WavPrr7/mrQItKXfffXed4+FnEcFIB4OGDFdipa4nejYS78fni/tFDB5/xMhRNkI7n0BsqH46np+AhE9+NjpSYwTCF0+pLQfhF2ex2BpypRRbjn3ZF5ve4hiKTQ8XhJzF4Vg+flXjfmJZjpiiHHqDmYVBubYvXIZxM2HrWpgm7XXcG8Qw0bPLfpnDj9YZRCB/BIszOm+pBk/cmvEWMpiaACyXK3Vp7Oei1OstNn25XMLn0+Lfij2nxQUVm5504bNbSr4wLeKZ2KNBgwZFXd45Tn0I4ueP54+4PXsmwvxaL4+AhE953JSrBRL4JRcsbN25ebEx5k6xFqaNxxHkKyMpliVf2kL7EQwm2hiLp6EWCIuVKVRmeCwMPuba7r33Xt/SYc3xpL322mujoNIwb0PrYctaQy/q8MVGa0xTmY28S9dyujsTI4UrxAQksWG0BjFGSylGTzATPbRCMEYULUx2f7inoyqY16yxn4tSrrWUtOVyCQfptM9xsefF5WTGmEwElzdkofu3obSFjuNuoyURMc9wDPQWxI1sn2ni+S699NI6Pc4KladjDROQ8GmYkVLUCIFvcwOoWVdXmrEZqbVYI0gTUUCTNX8MdMZ2UxiDB5oooM78ikzTcMOY0aX60Ucftc2Kl4g0XvR8ySMCaOHIF8RpYwRx0lBoVlyJIgtg
MEEbPBJ33zHHHBP1tuJlxX0v9p4TH2L3jPgoxmoJhWSRVSqYrLGfi4InL/NgJVzgZ5/BUlvKEOAWGM1wDrfcckuZV1B+Nlo0Eb/88SzRm5JxgzDc8PSOTEtslV/LlpFTvbpaxn3UVaRAgCHtzfjllWT8kuRLKcnwy5vhGslnxC/kKyNfnkL7w3rnG9yQ/LSolBOfE75EksQgx8M0heqadGz6rHFUOJZvVGLqTfdxs7TFnZUbLonzoMWFPxMpdpwWJwYuNKHD/bSu55am0DIc4BLhlyR6Ko1jauznotD1lXusUi7WEkhrHGNMJRnuI4QNfwwZgNmYUqwjYgs9z/k6B5C3FGMwTnu+4p9bnitGiA57vTGujywdAsnf4OmUrVJEoKoIhEPRxwft40J4udGLx1wc8YsLe5Hg/ghf1JaWfcSN0HQdjnljx8tZht1zaRUJB92z8viyprWCYEoLELZjDS3DQNG99tqrTnLE1HnnnVcnniQeuxS24CT1lrk11zpmxjgp8VGt4Y77w8Qi8RtJQw1YGWktcUXSEsUfYx8lmQkfjoWcwhiTMMjYygjdMjDs2bOnHfJLenYhrApZGPNhAzyG6Rv7uQjPxXpD1xxPn7RdKRcGeDQjSDguUriXobi2YQQYZsJizLgf4fNm5bEk1oYhHSg7bgxGmXSv4+lsmyEQ7Pni+4L1uP0UuHStfpaGeobDWth+Wk5xy8aNz15a3znxsqttW66uartjqm+jEeBLl19+9mXCaLmM78EXFF8aCIb4F2lYGcYN6dWrl0/DS5ph8xkXh27Y/IJkgEQGouOY+fPJU6nRCoMQsAH+6NXF4HvMa0VrAi0RjFdiv2KZdqOU3ij0pjI3AKPXMhgiI+ES14JIsTgVuw7ioybOGsiPfeQ3oxcXgzMS18CkrfRYYRyg7bff3n8p8+XPAInUnRGv4cRw/faFjdBgoLemMLoV25hHtAIwFhJ1ZmBGngX28fLBeClRXzPcJbiwEMlcE12V6X6PmCJ+hIEcEQoWi0KMBy0UcOP5o+XDhB5lJr0UmbjVRCJzrFEerY70WKI+jf1c2LXasqFrtnSFlpVyYbwpRgpHgPDHDwzGhsJNyWeY58wCmWFkI4CzjqBhkEKMnl2XX3aZG5uLuYIz23x+LaCZchCWFo+FSOVecM/5ccSApw0ZIg0RxjOCK4s4OT5XfGcwuSrPV9jSSPyeGc8HY1vxjOD2tXqTj1gz9lNvniuMOD1GG8d4PhhRvpZNwqeW776uvR4BetnwhWK/3Hjx2cuvXuKEHXwBMlUD+flCQ4CEIwBbFr7E0hA9Vh69rhhp2qamIEaJv7gRy8AItaUYc2jh9rGXPGOl2HgpVo695NmmVYsZ1c0QC7ROkB8mNhw/LyAL3mVSVsSBjZHSKycg+QuNc3B/QrdEeDztdQZDJJDbXCa89BAY/MWNF2zcENLmuoEJf2Hg7EUXXeR785CPF1X8noVM4UZZYddmBCNuGwzxSQ8zjBnObdymxnwu/Mli/xq65ljyxM1KuCCMGWWZFhueN7gwF1rcCCZnctvQCGInjsaYEu8XjlpuacnLvHUmethvnzXuE62WxQgfWkJ5nhmpHGGLCObHi/2AsfOx5IdAOAYY3ykmjPlRgPuceDnGI7L9fEb5scN51sqJNjPS1Lrwmc1gaCkCIvD/g/gxJQKtFLx4QuMLhBdJ6N4Ij7POL0vcIrTAhF+MHKM8Ak4RPMy4HFpYZrgeppkZBEvzCzU0fu0PHDjQD3YWP0Y6Yh94IdJtNqxXuD4jVqaVT9Al0zUwmnXcYEILUNgLJi4UET28zMJzxcvhhc4UH0ncSAs3ZkFPGlk6LDfpGvilywus2L8w3oJf4ZyXloj480C9eNETJ5I0azyCMYz7In1oXEu+NDBnRnVmLDdjzrbQGM2XVqlCVupzgbgqlpOlQ9CZ5bseOx4+2+G6HWdZKRem7uBZQmTGz8GzyDx0tJCEItLOz+f7rLPOSuwZybPO55tA9PhwFybGeUYQUMUarYCIKIRq+Bxbfnqb0dLIxMahIcot/Ve5lh3rycln3K6ZGC/qjD0X9FIjTa1bq5xyrPvtXutEdP1lEeAXe3Mav/DSNn454SbilxhuDAJQaZK2aRv4Eu3bt2/B0zK+D24iXpy8qOxLqWCmFA7S4oQA4Xz0BEkSQ+WchiBjmv2J48Hlw3UVa/z6xtVHq87kyZP9vET5eMDtL3/5i683LzDcKOUa7g9rrSq2DF7qSYY7A3cdbgTcdfZiSUrLPmvJoSUOlwQvzqQ8XC9l80zhGk1Kk+8cuF+5J7QaUH6hYRIaei4IyreWo3zni+9HrIZd+Yu95ng5SduVcLHyEHOdc/E33C+EarEGT7jCFxdUQ886bime0/hnzebJ47y0yoY/EuJ1MfcxgofnPino3fLAmecqLuCoN89ofD9uY8RSoTKt7Ja+lKurpd9hXV9JBGgyZiZkfmHxBRafRHTnnXeOyivmZUwafp01tdEcX0xze6n1ouWIF105lsQzXzlwy+IvU1oL+CvWEHa0BliLQL58XG8xz1NSfl5kSS1hSWkbei7yCdGksmxfvCWs2Gu2/IWWlXCxchEAcRFgxwotEQnxiV8LpTf3YjxNGAPXEF9EayHhGpZNWUnXRb2T9ocDgIbl1OK6hE8t3nVdcyIBfkHRVE9wLq0lgwcPrvMlhO/eAgQpoDkETWLFtbMgAVyXpbT4mAuhYKEt9CAxV2EQbTGXaW6WYtLWWhqeOwuI5tpLaXGqNVZNeb0SPk1JW+fKNAHcNzQTY7T8EJdCjARjadBMbAHPHOfX0/Dhw1mVZZyAmvaLv0GIvnDsmOJzKmVIgDix7t27+0lnCXjGaKEptjUnLEvr6RNQcHP6TFVilRJA5NA6QCyOGfE9+NFD0UN8SmPEFNk5tRQBEahuAnRFZyyd0M1lXeer+8paRu3V4tMy7qOuIiUC/OJlkDLG7KAXDQG2BFgS20IvCb68mE9JJgIiIAL5CBDvQ7A67nN+UI0fP94PHpovvfY3LQH16mpa3i32bIwNg3uoOYwYg7BXSXPUQecUAREQARGoDgJydVXHfcp8LcORepu6ss157qa+Vp1PBERABESgMgISPpXxU+5ZBOgJYhMENiUUzllqL5SmrJ/OJQIiIAIikC0CrXODO52crSqpNtVIgAHXGCiPwb4Y5M7mhWqsa8G9xVxEI0aMaHBgscaqg8oVAREQARGoPgKK8am+e6Yai4AIiIAIiIAIlElArq4ywSmbCIiACIiACIhA9RGQ8Km+e6Yai4AIiIAIiIAIlElAwqdMcMomAiIgAiIgAiJQfQQkfKrvnqnGIiACIiACIiACZRKQ8CkTnLKJgAiIgAiIgAhUHwEJn+q7Z6qxCIiACIiACIhAmQQkfMoEp2wiIAIiIAIiIALVR0DCp/rumWosAiIgAiIgAiJQJgEJnzLBKZsIiIAIiIAIiED1EZDwqb57phqLgAiIgAiIgAiUSUDCp0xwyiYCIiACIiACIlB9BCR8qu+eqcYiIAIiIAIiIAJlEpDwKROcsomACIiACIi
ACFQfAQmf6rtnqrEIiIAIiIAIiECZBCR8ygSnbCIgAiIgAiIgAtVHQMKn+u6ZaiwCIiACIiACIlAmAQmfMsEpmwiIgAiIgAiIQPURkPCpvnumGouACIiACIiACJRJQMKnTHDKJgIiIAIiIAIiUH0EJHyq756pxiIgAiIgAiIgAmUSkPApE5yyiYAIiIAIiIAIVB+B/wO9N/2l2KPKEwAAAABJRU5ErkJggg==)" + ], + "metadata": { + "id": "cVofNXVW-EMo" + } + }, + { + "cell_type": "code", + "source": [ + "'''\n", + " {\n", + " \"doc\": [\n", + " {\n", + " \"prompt_states\": {\n", + " \"type\": \"Prompt\",\n", + " \"data\": {\n", + " \"_components\": {\n", + " \"_ordered_dict\": true,\n", + " \"data\": []\n", + " },\n", + " \"_parameters\": {\n", + " \"_ordered_dict\": true,\n", + " \"data\": []\n", + " },\n", + " \"training\": false,\n", + " \"teacher_mode\": false,\n", + " \"tracing\": false,\n", + " \"name\": \"Prompt\",\n", + " \"_init_args\": {\n", + " \"template\": null,\n", + " \"prompt_kwargs\": {}\n", + " },\n", + " \"template\": \" You are a doctor User: {{input_str}}\",\n", + " \"prompt_variables\": [\n", + " \"input_str\"\n", + " ],\n", + " \"prompt_kwargs\": {}\n", + " }\n", + " },\n", + " \"time_stamp\": \"2024-11-29T12:36:33.302956\"\n", + " }\n", + " ]\n", + "}\n", + "'''" + ], + "metadata": { + "id": "dPd9i6_t7ERJ" + }, + "execution_count": null, + "outputs": [] + } + ] +} diff --git a/tutorials/adalflow_tracing.py b/tutorials/adalflow_tracing.py new file mode 100644 index 00000000..d49008e6 --- /dev/null +++ b/tutorials/adalflow_tracing.py @@ -0,0 +1,88 @@ +""" +This script demonstrates the usage of AdalFlow's tracing functionality. +It shows how to track Generator states and changes during development. +""" + +import os +from getpass import getpass +from adalflow.tracing import trace_generator_states +from adalflow.core import Generator +import adalflow as adal +from adalflow.components.model_client import OpenAIClient + + +def setup_environment(): + """Setup API keys and environment variables.""" + # In a production environment, you might want to use environment variables + # or a configuration file instead of getpass + if "OPENAI_API_KEY" not in os.environ: + openai_api_key = getpass("Please enter your OpenAI API key: ") + os.environ["OPENAI_API_KEY"] = openai_api_key + + if "GROQ_API_KEY" not in os.environ: + groq_api_key = getpass("Please enter your GROQ API key: ") + os.environ["GROQ_API_KEY"] = groq_api_key + + print("API keys have been set.") + + +# Define the template for the doctor QA system +template_doc = r""" You are a doctor User: {{input_str}}""" + + +@trace_generator_states() +class DocQA(adal.Component): + """ + A component that uses a Generator to answer medical questions. + The @trace_generator_states decorator automatically tracks changes + to any Generator attributes in this class. + """ + + def __init__(self): + super(DocQA, self).__init__() + self.generator = Generator( + template=template_doc, + model_client=OpenAIClient(), + model_kwargs={"model": "gpt-4-turbo-preview"}, + ) + + def call(self, query: str) -> str: + """ + Process a medical query and return the response. 
+ + Args: + query: The medical question to be answered + + Returns: + The generated response from the doctor AI + """ + return self.generator(prompt_kwargs={"input_str": query}).data + + +def main(): + """Main function to demonstrate tracing functionality.""" + # Setup environment + setup_environment() + + # Initialize the DocQA component + doc_qa = DocQA() + + # Example queries + queries = [ + "What are the common symptoms of the flu?", + "How can I manage my allergies?", + "What should I do for a minor burn?", + ] + + # Process each query + for query in queries: + print(f"\nQuery: {query}") + response = doc_qa.call(query) + print(f"Response: {response}") + + print("\nNote: Generator states have been logged to the traces directory.") + print("You can find the logs in: ./traces/DocQA/generator_state_trace.json") + + +if __name__ == "__main__": + main() From 9487dcbbe83cd9aed819d29fca20f00254bbef22 Mon Sep 17 00:00:00 2001 From: fm1320 Date: Wed, 4 Dec 2024 02:56:22 +0000 Subject: [PATCH 23/40] edit imports in .ipynb --- use_cases/question_answering/chatbot.ipynb | 29 +++++++++++++++++----- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/use_cases/question_answering/chatbot.ipynb b/use_cases/question_answering/chatbot.ipynb index fc08647c..3db858a4 100644 --- a/use_cases/question_answering/chatbot.ipynb +++ b/use_cases/question_answering/chatbot.ipynb @@ -20,9 +20,24 @@ "metadata": {}, "outputs": [], "source": [ - "# Import needed modules from LightRAGfrom adalflow.core.component import Component\n", + "from IPython.display import clear_output\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "clear_output()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Import needed modules from Adalflow\n", + "import os\n", + "from getpass import getpass\n", + "from adalflow.core.component import Component\n", "from adalflow.core.generator import Generator\n", - "from adalflow.components.memory.memory import Memory" + "from adalflow.components.memory.memory import Memory\n", + "from adalflow.components.model_client import OpenAIClient # Here, we use the OpenAIClient as an example, but you can use any other clients (with the corresponding API Key as needed), such as AnthropicAPIClient" ] }, { @@ -31,9 +46,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Here, we use the OpenAIClient as an example, but you can use any other clients (with the corresponding API Key as needed), such as AnthropicAPIClient\n", - "from adalflow.components.model_client import OpenAIClient\n", - "OPENAI_API_KEY=\"YOUR_API_KEY\" # Replace with your OpenAI API Key, or you can put it in a .env file" + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "# Set environment variables\n", + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "# Replace with your OpenAI API Key, or you can put it in a .env file" ] }, { @@ -48,7 +65,7 @@ " super().__init__()\n", " self.generator = Generator(\n", " model_client=OpenAIClient(),\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}\n", + " model_kwargs={'model': 'gpt-4o-mini'}\n", " )\n", " self.chat_history = Memory() # Memory to store the chat history\n", " \n", From d91638ed7d1fa680727e77227390011857c55e61 Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Sun, 27 Oct 2024 14:25:28 +0530 Subject: [PATCH 24/40] lazy import lancedb --- adalflow/adalflow/components/retriever/__init__.py | 6 ++++++ 1 file changed, 6 
insertions(+) diff --git a/adalflow/adalflow/components/retriever/__init__.py b/adalflow/adalflow/components/retriever/__init__.py index 2d249604..adc66a02 100644 --- a/adalflow/adalflow/components/retriever/__init__.py +++ b/adalflow/adalflow/components/retriever/__init__.py @@ -27,6 +27,11 @@ OptionalPackages.QDRANT, ) +LanceDBRetriever = LazyImport( + "adalflow.components.retriever.lancedb_retriver.LanceDBRetriever", + OptionalPackages.LANCEDB, +) + __all__ = [ "BM25Retriever", "LLMRetriever", @@ -34,6 +39,7 @@ "RerankerRetriever", "PostgresRetriever", "QdrantRetriever", + "LanceDBRetriever", "split_text_by_word_fn", "split_text_by_word_fn_then_lower_tokenized", ] From 88fa0a2c0025b70afd7f06e875976e255dc7acb0 Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Sun, 27 Oct 2024 14:26:26 +0530 Subject: [PATCH 25/40] lancdb as retriver support --- .../components/retriever/lancedb_retriver.py | 77 +++++++++++ adalflow/tests/test_lancedb_retriver.py | 123 ++++++++++++++++++ 2 files changed, 200 insertions(+) create mode 100644 adalflow/adalflow/components/retriever/lancedb_retriver.py create mode 100644 adalflow/tests/test_lancedb_retriver.py diff --git a/adalflow/adalflow/components/retriever/lancedb_retriver.py b/adalflow/adalflow/components/retriever/lancedb_retriver.py new file mode 100644 index 00000000..3949b65b --- /dev/null +++ b/adalflow/adalflow/components/retriever/lancedb_retriver.py @@ -0,0 +1,77 @@ +import logging +import numpy as np +import pyarrow as pa +import lancedb +from typing import List, Optional, Sequence, Union, Dict, Any +from adalflow.core.embedder import Embedder +from adalflow.core.types import ModelClientType, RetrieverOutput, RetrieverOutputType + +# Initialize logging +log = logging.getLogger(__name__) + +# Defined data types +LanceDBRetrieverDocumentEmbeddingType = Union[List[float], np.ndarray] # single embedding +LanceDBRetrieverDocumentsType = Sequence[LanceDBRetrieverDocumentEmbeddingType] + +# Step 2: Define the LanceDBRetriever class +class LanceDBRetriever: + def __init__(self, embedder: Embedder, dimensions: int, db_uri: str = "/tmp/lancedb", top_k: int = 5, overwrite: bool = True): + self.db = lancedb.connect(db_uri) + self.embedder = embedder + self.top_k = top_k + self.dimensions = dimensions + + # Define table schema with vector field for embeddings + schema = pa.schema([ + pa.field("vector", pa.list_(pa.float32(), list_size=self.dimensions)), + pa.field("content", pa.string()) + ]) + + # Create or overwrite the table for storing documents and embeddings + self.table = self.db.create_table("documents", schema=schema, mode="overwrite" if overwrite else "append") + + def add_documents(self, documents: Sequence[Dict[str, Any]]): + """Adds documents with pre-computed embeddings.""" + if not documents: + log.warning("No documents provided for embedding") + return + + # Embed document content using Embedder + doc_texts = [doc["content"] for doc in documents] + embeddings = self.embedder(input=doc_texts).data + + # Format embeddings for LanceDB + data = [{"vector": embedding.embedding, "content": text} for embedding, text in zip(embeddings, doc_texts)] + + # Add data to LanceDB table + self.table.add(data) + log.info(f"Added {len(documents)} documents to the index") + + def retrieve(self, query: Union[str, List[str]], top_k: Optional[int] = None) -> List[RetrieverOutput]: + """Retrieve top-k documents from LanceDB for given query or queries.""" + if isinstance(query, str): + query = [query] + + # Embed the query text(s) with Embedder + query_embeddings = 
self.embedder(input=query).data + output: List[RetrieverOutput] = [] + + # Perform search in LanceDB for each query + for query_emb in query_embeddings: + results = ( + self.table.search(query_emb.embedding) + .limit(top_k or self.top_k) + .to_pandas() + ) + + # Gather indices and scores from search results + indices = results.index.tolist() + scores = results["_distance"].tolist() + + # Append results to output + output.append(RetrieverOutput( + doc_indices=indices, + doc_scores=scores, + query=query[0] if len(query) == 1 else query + )) + return output diff --git a/adalflow/tests/test_lancedb_retriver.py b/adalflow/tests/test_lancedb_retriver.py new file mode 100644 index 00000000..2f52a55d --- /dev/null +++ b/adalflow/tests/test_lancedb_retriver.py @@ -0,0 +1,123 @@ +import unittest +from unittest.mock import Mock, MagicMock +import numpy as np +from adalflow.components.retriever import LanceDBRetriever +from adalflow.core.embedder import Embedder +from adalflow.core.types import RetrieverOutput, Document + +# Mock LanceDB and PyArrow imports since they are specific to LanceDB +lancedb = MagicMock() +pa = MagicMock() + +class TestLanceDBRetriever(unittest.TestCase): + def setUp(self): + # Basic configuration + self.dimensions = 128 + self.embedder = Mock(spec=Embedder) + self.db_uri = "/tmp/test_lancedb" + + # Mock embedding output with a simple structure + self.dummy_embeddings = np.random.rand(10, self.dimensions).astype(np.float32) + self.embedder.return_value.data = [ + Mock(embedding=embedding) for embedding in self.dummy_embeddings + ] + + # Initialize LanceDBRetriever with mocked embedder + self.retriever = LanceDBRetriever( + embedder=self.embedder, dimensions=self.dimensions, db_uri=self.db_uri + ) + + # Mock LanceDB table and connection + self.retriever.db.create_table = MagicMock(return_value=Mock()) + self.retriever.table = self.retriever.db.create_table.return_value + + def test_initialization(self): + # Check dimensions and embedder assignment + self.assertEqual(self.retriever.dimensions, self.dimensions) + self.assertEqual(self.retriever.top_k, 5) + + def test_add_documents(self): + # Sample documents + documents = [{"content": f"Document {i}"} for i in range(5)] + + # Mock LanceDB table add method + self.retriever.table.add = MagicMock() + + # Add documents to LanceDBRetriever + self.retriever.add_documents(documents) + + # Ensure add method was called + self.retriever.table.add.assert_called_once() + # Verify embeddings were passed to LanceDB add method + added_data = self.retriever.table.add.call_args[0][0] + self.assertEqual(len(added_data), len(documents)) + self.assertIn("vector", added_data[0]) + self.assertIn("content", added_data[0]) + + def test_retrieve(self): + # Prepare a sample query and mocked search result from LanceDB + query = "test query" + dummy_scores = [0.9, 0.8, 0.7] + dummy_indices = [0, 1, 2] + + # Set up mock search result as if it was retrieved from LanceDB + self.retriever.table.search = MagicMock(return_value=Mock()) + self.retriever.table.search().limit().to_pandas.return_value = Mock( + index=dummy_indices, _distance=dummy_scores + ) + + # Retrieve top-k results for the query + result = self.retriever.retrieve(query) + + # Check if retrieve method returns expected output structure + self.assertIsInstance(result, list) + self.assertEqual(len(result), 1) + self.assertIsInstance(result[0], RetrieverOutput) + self.assertEqual(result[0].query, query) + self.assertEqual(result[0].doc_indices, dummy_indices) + self.assertEqual(result[0].doc_scores, 
dummy_scores) + + def test_retrieve_multiple_queries(self): + # Prepare multiple queries and mocked search result + queries = ["query 1", "query 2"] + dummy_scores = [[0.9, 0.8], [0.85, 0.75]] + dummy_indices = [[0, 1], [2, 3]] + + # Set up mock for each query's result + self.retriever.table.search().limit().to_pandas.side_effect = [ + Mock(index=dummy_indices[0], _distance=dummy_scores[0]), + Mock(index=dummy_indices[1], _distance=dummy_scores[1]), + ] + + # Retrieve for multiple queries + results = self.retriever.retrieve(queries) + + # Verify the structure and content of the results + self.assertEqual(len(results), len(queries)) + for i, result in enumerate(results): + self.assertEqual(result.query, queries[i]) + self.assertEqual(result.doc_indices, dummy_indices[i]) + self.assertEqual(result.doc_scores, dummy_scores[i]) + + def test_empty_document_addition(self): + # Ensure warning log for empty document list + with self.assertLogs(level='WARNING'): + self.retriever.add_documents([]) + + def test_retrieve_with_empty_query(self): + # Check empty query handling, expecting a list with empty RetrieverOutput + result = self.retriever.retrieve("") + self.assertEqual(result, [RetrieverOutput(doc_indices=[], doc_scores=[], query="")]) + + def test_add_documents_embedding_failure(self): + # Simulate embedding failure + self.embedder.side_effect = Exception("Embedding failure") + documents = [{"content": "test document"}] + + with self.assertRaises(Exception) as context: + self.retriever.add_documents(documents) + + self.assertEqual(str(context.exception), "Embedding failure") + +if __name__ == "__main__": + unittest.main() From cacc74ec6e5b1ae296b7ba05dc5b31e35c3fcb82 Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Sun, 27 Oct 2024 14:27:04 +0530 Subject: [PATCH 26/40] lancedb pacage added --- docs/pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/pyproject.toml b/docs/pyproject.toml index 3c69682b..4c45f01c 100644 --- a/docs/pyproject.toml +++ b/docs/pyproject.toml @@ -33,6 +33,7 @@ pgvector = "^0.3.0" faiss-cpu = "^1.8.0.post1" ollama = "^0.3.0" qdrant-client = "^1.10.1" +lancedb = "^0.14.0" diskcache = "^5.6.3" torch = "^2.4.0" From 396160aa9b365da9b5963aa96b61574065c89185 Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Sun, 27 Oct 2024 14:27:40 +0530 Subject: [PATCH 27/40] lancedb support added --- adalflow/adalflow/utils/lazy_import.py | 5 ++ docs/source/tutorials/retriever.rst | 81 ++++++++++++++++++++++++++ 2 files changed, 86 insertions(+) diff --git a/adalflow/adalflow/utils/lazy_import.py b/adalflow/adalflow/utils/lazy_import.py index 3451714b..831e9906 100644 --- a/adalflow/adalflow/utils/lazy_import.py +++ b/adalflow/adalflow/utils/lazy_import.py @@ -74,6 +74,11 @@ class OptionalPackages(Enum): "Please install faiss with: pip install faiss-cpu (or faiss if you use GPU)", ) + LANCEDB = ( + "lancedb", + "Please install lancedb with: pip install lancedb .", + ) + # db library SQLALCHEMY = ( "sqlalchemy", diff --git a/docs/source/tutorials/retriever.rst b/docs/source/tutorials/retriever.rst index 7e1a30dd..4583ac0e 100644 --- a/docs/source/tutorials/retriever.rst +++ b/docs/source/tutorials/retriever.rst @@ -369,6 +369,87 @@ The printout is: In default, the score is a simulated probabity in range ``[0, 1]`` using consine similarity. The higher the score, the more relevant the document is to the query. You can check the retriever for more type of scores. 
+LanceDBRetriever +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +To perform semantic search using LanceDB, we will use :class:`LanceDBRetriever`. +The `LanceDBRetriever` is designed for efficient vector-based retrieval with LanceDB, leveraging embeddings that can be either ``List[float]`` or ``np.ndarray``. +LanceDB supports in-memory and disk-based configurations and can handle large-scale data with high retrieval speed. + +.. note :: + The ``lancedb`` package is optional. Ensure you have it installed in your environment to use LanceDBRetriever. + +We will prepare the document embeddings using the `content` field. + +.. code-block:: python + + from adalflow.core.embedder import Embedder + from adalflow.core.types import ModelClientType + + model_kwargs = { + "model": "text-embedding-3-small", + "dimensions": 256, + "encoding_format": "float", + } + + embedder = Embedder(model_client=ModelClientType.OPENAI(), model_kwargs=model_kwargs) + output = embedder(input=[doc["content"] for doc in documents]) + documents_embeddings = [x.embedding for x in output.data] + +After initializing the LanceDB retriever, we can add documents and perform retrievals. The retriever can be set with its top-k hyperparameter during initialization. + +.. code-block:: python + + from adalflow.components.retriever import LanceDBRetriever + retriever = LanceDBRetriever(embedder=embedder, dimensions=256, db_uri="/tmp/lancedb", top_k=2) + + print(retriever) + +The printout: + +.. code-block:: + + LanceDBRetriever( + top_k=2, dimensions=256, total_documents=0 + (embedder): Embedder( + model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, + (model_client): OpenAIClient() + ) + ) + +We can add documents to LanceDB and use the retriever for query-based searches. + +.. code-block:: python + + documents = [ + { + "title": "The Impact of Renewable Energy on the Economy", + "content": "Renewable energy technologies not only help in reducing greenhouse gas emissions but also contribute significantly to the economy by creating jobs." + }, + { + "title": "Understanding Solar Panels", + "content": "Solar panels convert sunlight into electricity by allowing photons, or light particles, to knock electrons free from atoms." + }, + { + "title": "Pros and Cons of Solar Energy", + "content": "While solar energy offers substantial environmental benefits, such as reducing carbon footprints and pollution, it also has downsides." + }, + { + "title": "Renewable Energy and Its Effects", + "content": "Renewable energy sources like wind, solar, and hydro power play a crucial role in combating climate change." + } + ] + + # Add documents to LanceDB + retriever.add_documents(documents) + + # Perform retrieval queries + output_1 = retriever.retrieve(query="What are the benefits of renewable energy?") + output_2 = retriever.retrieve(query="How do solar panels impact the environment?") + print("Query 1 Results:", output_1) + print("Query 2 Results:", output_2) + +This setup allows the `LanceDBRetriever` to function as an efficient tool for large-scale, embedding-based document retrieval within LanceDB. + BM25Retriever ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ So the semantic search works pretty well. We will see how :class:`BM25Retriever` works in comparison. 
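For reference, a minimal sketch of how the ``RetrieverOutput`` objects returned by ``retrieve`` in the documentation example above can be inspected; the ``print_retrieval`` helper below is hypothetical and not part of this patch. Note that ``doc_scores`` is filled from LanceDB's ``_distance`` column, so, unlike the cosine-similarity scores described earlier in this page, smaller values generally indicate closer matches.

.. code-block:: python

    from typing import List
    from adalflow.core.types import RetrieverOutput

    def print_retrieval(outputs: List[RetrieverOutput]) -> None:
        """Hypothetical helper: print the indices and LanceDB distances per query."""
        for out in outputs:
            print(f"query: {out.query}")
            # doc_indices and doc_scores are parallel lists built from the
            # LanceDB search result's `index` and `_distance` columns.
            for idx, score in zip(out.doc_indices, out.doc_scores):
                print(f"  index={idx}  distance={score:.4f}")

    # Usage with the retrieval outputs from the example above:
    # print_retrieval(output_1)
    # print_retrieval(output_2)
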
From e8a87b06fcda82cf7133ad09e350959ead33cedc Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Thu, 31 Oct 2024 12:40:13 +0530 Subject: [PATCH 28/40] added subclass & doc --- .../components/retriever/lancedb_retriver.py | 38 +++++++++++++++++-- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/adalflow/adalflow/components/retriever/lancedb_retriver.py b/adalflow/adalflow/components/retriever/lancedb_retriver.py index 3949b65b..e6550e73 100644 --- a/adalflow/adalflow/components/retriever/lancedb_retriver.py +++ b/adalflow/adalflow/components/retriever/lancedb_retriver.py @@ -4,7 +4,8 @@ import lancedb from typing import List, Optional, Sequence, Union, Dict, Any from adalflow.core.embedder import Embedder -from adalflow.core.types import ModelClientType, RetrieverOutput, RetrieverOutputType +from adalflow.core.retriever import Retriever +from adalflow.core.types import RetrieverOutput # Initialize logging log = logging.getLogger(__name__) @@ -14,8 +15,24 @@ LanceDBRetrieverDocumentsType = Sequence[LanceDBRetrieverDocumentEmbeddingType] # Step 2: Define the LanceDBRetriever class -class LanceDBRetriever: +class LanceDBRetriever(Retriever[LanceDBRetrieverDocumentEmbeddingType, Union[str, List[str]]]): def __init__(self, embedder: Embedder, dimensions: int, db_uri: str = "/tmp/lancedb", top_k: int = 5, overwrite: bool = True): + """ + LanceDBRetriever is a retriever that leverages LanceDB to efficiently store and query document embeddings. + + Attributes: + embedder (Embedder): An instance of the Embedder class used for computing embeddings. + dimensions (int): The dimensionality of the embeddings used. + db_uri (str): The URI of the LanceDB storage (default is "/tmp/lancedb"). + top_k (int): The number of top results to retrieve for a given query (default is 5). + overwrite (bool): If True, the existing table is overwritten; otherwise, new documents are appended. + + This retriever supports adding documents with their embeddings to a LanceDB storage and retrieving relevant documents based on a given query. + + More information on LanceDB can be found here:(https://github.com/lancedb/lancedb) + Documentations: https://lancedb.github.io/lancedb/ + """ + self.db = lancedb.connect(db_uri) self.embedder = embedder self.top_k = top_k @@ -31,7 +48,12 @@ def __init__(self, embedder: Embedder, dimensions: int, db_uri: str = "/tmp/lanc self.table = self.db.create_table("documents", schema=schema, mode="overwrite" if overwrite else "append") def add_documents(self, documents: Sequence[Dict[str, Any]]): - """Adds documents with pre-computed embeddings.""" + """ + Adds documents with pre-computed embeddings to LanceDB. + Args: + documents (Sequence[Dict[str, Any]]): A sequence of documents, each with a 'content' field containing text. + + """ if not documents: log.warning("No documents provided for embedding") return @@ -48,7 +70,15 @@ def add_documents(self, documents: Sequence[Dict[str, Any]]): log.info(f"Added {len(documents)} documents to the index") def retrieve(self, query: Union[str, List[str]], top_k: Optional[int] = None) -> List[RetrieverOutput]: - """Retrieve top-k documents from LanceDB for given query or queries.""" + """. + Retrieve top-k documents from LanceDB for a given query or queries. + Args: + query (Union[str, List[str]]): A query string or a list of query strings. + top_k (Optional[int]): The number of top documents to retrieve (if not specified, defaults to the instance's top_k). 
+ + Returns: + List[RetrieverOutput]: A list of RetrieverOutput containing the indices and scores of the retrieved documents. + """ if isinstance(query, str): query = [query] From 471cb1c04930a1d4c7d4d1c7a4fd10fff6a59770 Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Thu, 31 Oct 2024 14:16:53 +0530 Subject: [PATCH 29/40] fix --- adalflow/adalflow/components/retriever/lancedb_retriver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/adalflow/adalflow/components/retriever/lancedb_retriver.py b/adalflow/adalflow/components/retriever/lancedb_retriver.py index e6550e73..7cda1abc 100644 --- a/adalflow/adalflow/components/retriever/lancedb_retriver.py +++ b/adalflow/adalflow/components/retriever/lancedb_retriver.py @@ -32,7 +32,7 @@ def __init__(self, embedder: Embedder, dimensions: int, db_uri: str = "/tmp/lanc More information on LanceDB can be found here:(https://github.com/lancedb/lancedb) Documentations: https://lancedb.github.io/lancedb/ """ - + super().__init__() self.db = lancedb.connect(db_uri) self.embedder = embedder self.top_k = top_k From f44932bfe9e966a925abd8dcd91ea2060dae8eec Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Thu, 31 Oct 2024 14:59:10 +0530 Subject: [PATCH 30/40] fixed test cases --- adalflow/tests/test_lancedb_retriver.py | 192 +++++++++++++----------- 1 file changed, 105 insertions(+), 87 deletions(-) diff --git a/adalflow/tests/test_lancedb_retriver.py b/adalflow/tests/test_lancedb_retriver.py index 2f52a55d..59de2c82 100644 --- a/adalflow/tests/test_lancedb_retriver.py +++ b/adalflow/tests/test_lancedb_retriver.py @@ -1,123 +1,141 @@ import unittest -from unittest.mock import Mock, MagicMock +from unittest.mock import Mock, patch import numpy as np +import pandas as pd from adalflow.components.retriever import LanceDBRetriever from adalflow.core.embedder import Embedder -from adalflow.core.types import RetrieverOutput, Document +from unittest import mock +from adalflow.core.types import EmbedderOutput, RetrieverOutput -# Mock LanceDB and PyArrow imports since they are specific to LanceDB -lancedb = MagicMock() -pa = MagicMock() +# Helper function to create dummy embeddings +def create_dummy_embeddings(num_embeddings, dim): + return np.random.rand(num_embeddings, dim).astype(np.float32) class TestLanceDBRetriever(unittest.TestCase): def setUp(self): - # Basic configuration self.dimensions = 128 + self.top_k = 5 + self.single_query = ["sample query"] self.embedder = Mock(spec=Embedder) - self.db_uri = "/tmp/test_lancedb" - # Mock embedding output with a simple structure - self.dummy_embeddings = np.random.rand(10, self.dimensions).astype(np.float32) - self.embedder.return_value.data = [ - Mock(embedding=embedding) for embedding in self.dummy_embeddings - ] - - # Initialize LanceDBRetriever with mocked embedder - self.retriever = LanceDBRetriever( - embedder=self.embedder, dimensions=self.dimensions, db_uri=self.db_uri + # Mock embedder to return dummy embeddings + self.dummy_embeddings = create_dummy_embeddings(10, self.dimensions) + self.embedder.return_value = EmbedderOutput( + data=[Mock(embedding=emb) for emb in self.dummy_embeddings[:len(self.single_query)]] ) - # Mock LanceDB table and connection - self.retriever.db.create_table = MagicMock(return_value=Mock()) - self.retriever.table = self.retriever.db.create_table.return_value + with patch("lancedb.connect") as mock_db_connect: + self.mock_db = mock_db_connect.return_value + self.mock_table = Mock() + self.mock_db.create_table.return_value = self.mock_table + self.retriever = 
LanceDBRetriever( + embedder=self.embedder, + dimensions=self.dimensions, + db_uri="/tmp/lancedb", + top_k=self.top_k + ) def test_initialization(self): - # Check dimensions and embedder assignment self.assertEqual(self.retriever.dimensions, self.dimensions) - self.assertEqual(self.retriever.top_k, 5) + self.assertEqual(self.retriever.top_k, self.top_k) + self.mock_db.create_table.assert_called_once() def test_add_documents(self): - # Sample documents - documents = [{"content": f"Document {i}"} for i in range(5)] + documents = [{"content": f"Document {i}"} for i in range(10)] + embeddings = create_dummy_embeddings(len(documents), self.dimensions) - # Mock LanceDB table add method - self.retriever.table.add = MagicMock() + # Mock embedding output + self.embedder.return_value = EmbedderOutput( + data=[Mock(embedding=embedding) for embedding in embeddings] + ) - # Add documents to LanceDBRetriever self.retriever.add_documents(documents) + self.assertEqual(self.mock_table.add.call_count, 1) + args, _ = self.mock_table.add.call_args + self.assertEqual(len(args[0]), len(documents)) + + def test_add_documents_no_documents(self): + self.retriever.add_documents([]) + self.mock_table.add.assert_not_called() - # Ensure add method was called - self.retriever.table.add.assert_called_once() - # Verify embeddings were passed to LanceDB add method - added_data = self.retriever.table.add.call_args[0][0] - self.assertEqual(len(added_data), len(documents)) - self.assertIn("vector", added_data[0]) - self.assertIn("content", added_data[0]) - - def test_retrieve(self): - # Prepare a sample query and mocked search result from LanceDB - query = "test query" - dummy_scores = [0.9, 0.8, 0.7] - dummy_indices = [0, 1, 2] - - # Set up mock search result as if it was retrieved from LanceDB - self.retriever.table.search = MagicMock(return_value=Mock()) - self.retriever.table.search().limit().to_pandas.return_value = Mock( - index=dummy_indices, _distance=dummy_scores + def test_retrieve_single_query(self): + query = "sample query" + query_embedding = create_dummy_embeddings(1, self.dimensions)[0] + + # Mock embedding for query + self.embedder.return_value = EmbedderOutput( + data=[Mock(embedding=query_embedding)] ) - # Retrieve top-k results for the query - result = self.retriever.retrieve(query) + # Mock search results from LanceDB as pandas DataFrame + results_df = pd.DataFrame({ + "index": [0, 1, 2], + "_distance": [0.1, 0.2, 0.3] + }) + self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = results_df - # Check if retrieve method returns expected output structure - self.assertIsInstance(result, list) - self.assertEqual(len(result), 1) + result = self.retriever.retrieve(query) self.assertIsInstance(result[0], RetrieverOutput) - self.assertEqual(result[0].query, query) - self.assertEqual(result[0].doc_indices, dummy_indices) - self.assertEqual(result[0].doc_scores, dummy_scores) + self.assertEqual(len(result[0].doc_indices), 3) + self.assertEqual(len(result[0].doc_scores), 3) + self.assertListEqual(result[0].doc_indices, [0, 1, 2]) + self.assertListEqual(result[0].doc_scores, [0.1, 0.2, 0.3]) def test_retrieve_multiple_queries(self): - # Prepare multiple queries and mocked search result queries = ["query 1", "query 2"] - dummy_scores = [[0.9, 0.8], [0.85, 0.75]] - dummy_indices = [[0, 1], [2, 3]] - - # Set up mock for each query's result - self.retriever.table.search().limit().to_pandas.side_effect = [ - Mock(index=dummy_indices[0], _distance=dummy_scores[0]), - Mock(index=dummy_indices[1], 
_distance=dummy_scores[1]), - ] - - # Retrieve for multiple queries - results = self.retriever.retrieve(queries) - - # Verify the structure and content of the results - self.assertEqual(len(results), len(queries)) - for i, result in enumerate(results): - self.assertEqual(result.query, queries[i]) - self.assertEqual(result.doc_indices, dummy_indices[i]) - self.assertEqual(result.doc_scores, dummy_scores[i]) - - def test_empty_document_addition(self): - # Ensure warning log for empty document list - with self.assertLogs(level='WARNING'): - self.retriever.add_documents([]) + query_embeddings = create_dummy_embeddings(len(queries), self.dimensions) - def test_retrieve_with_empty_query(self): - # Check empty query handling, expecting a list with empty RetrieverOutput - result = self.retriever.retrieve("") - self.assertEqual(result, [RetrieverOutput(doc_indices=[], doc_scores=[], query="")]) + # Mock embedding for queries + self.embedder.return_value = EmbedderOutput( + data=[Mock(embedding=embedding) for embedding in query_embeddings] + ) + + # Mock search results for each query + results_df = pd.DataFrame({ + "index": [0, 1, 2], + "_distance": [0.1, 0.2, 0.3] + }) + self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = results_df - def test_add_documents_embedding_failure(self): - # Simulate embedding failure - self.embedder.side_effect = Exception("Embedding failure") - documents = [{"content": "test document"}] + result = self.retriever.retrieve(queries) + self.assertEqual(len(result), len(queries)) + for res in result: + self.assertIsInstance(res, RetrieverOutput) + self.assertEqual(len(res.doc_indices), 3) + self.assertEqual(len(res.doc_scores), 3) - with self.assertRaises(Exception) as context: - self.retriever.add_documents(documents) + def test_retrieve_with_empty_query(self): + # Mock the empty results DataFrame + self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = pd.DataFrame({ + "index": [], + "_distance": [] + }) + + def test_retrieve_with_no_index(self): + empty_retriever = LanceDBRetriever( + embedder=self.embedder, dimensions=self.dimensions + ) + with self.assertRaises(ValueError): + empty_retriever.retrieve("test query") + + def test_overwrite_table_on_initialization(self): + with patch("lancedb.connect") as mock_db_connect: + mock_db = mock_db_connect.return_value + mock_table = Mock() + mock_db.create_table.return_value = mock_table + + LanceDBRetriever( + embedder=self.embedder, + dimensions=self.dimensions, + db_uri="/tmp/lancedb", + overwrite=True + ) + mock_db.create_table.assert_called_once_with( + "documents", + schema=mock.ANY, + mode="overwrite" + ) - self.assertEqual(str(context.exception), "Embedding failure") if __name__ == "__main__": unittest.main() From 022181d2251aa447fbc1b7b73c4406138e95668f Mon Sep 17 00:00:00 2001 From: akashmangoai Date: Thu, 31 Oct 2024 14:59:27 +0530 Subject: [PATCH 31/40] fix --- .../adalflow/components/retriever/lancedb_retriver.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/adalflow/adalflow/components/retriever/lancedb_retriver.py b/adalflow/adalflow/components/retriever/lancedb_retriver.py index 7cda1abc..68314411 100644 --- a/adalflow/adalflow/components/retriever/lancedb_retriver.py +++ b/adalflow/adalflow/components/retriever/lancedb_retriver.py @@ -82,7 +82,13 @@ def retrieve(self, query: Union[str, List[str]], top_k: Optional[int] = None) -> if isinstance(query, str): query = [query] - # Embed the query text(s) with Embedder + if not query or 
(isinstance(query, str) and query.strip() == ""): + raise ValueError("Query cannot be empty.") + + # Check if table (index) exists before performing search + if not self.table: + raise ValueError("The index has not been initialized or the table is missing.") + query_embeddings = self.embedder(input=query).data output: List[RetrieverOutput] = [] From b70939379a1f9aa9421fe175d6a72be185da55f4 Mon Sep 17 00:00:00 2001 From: fm1320 Date: Tue, 10 Dec 2024 14:46:52 +0000 Subject: [PATCH 32/40] formatting changes --- .../components/retriever/lancedb_retriver.py | 59 +- adalflow/adalflow/utils/lazy_import.py | 2 +- adalflow/tests/test_lancedb_retriver.py | 40 +- docs/poetry.lock | 4573 +++++++++-------- 4 files changed, 2518 insertions(+), 2156 deletions(-) diff --git a/adalflow/adalflow/components/retriever/lancedb_retriver.py b/adalflow/adalflow/components/retriever/lancedb_retriver.py index 68314411..9390452e 100644 --- a/adalflow/adalflow/components/retriever/lancedb_retriver.py +++ b/adalflow/adalflow/components/retriever/lancedb_retriver.py @@ -11,12 +11,24 @@ log = logging.getLogger(__name__) # Defined data types -LanceDBRetrieverDocumentEmbeddingType = Union[List[float], np.ndarray] # single embedding +LanceDBRetrieverDocumentEmbeddingType = Union[ + List[float], np.ndarray +] # single embedding LanceDBRetrieverDocumentsType = Sequence[LanceDBRetrieverDocumentEmbeddingType] + # Step 2: Define the LanceDBRetriever class -class LanceDBRetriever(Retriever[LanceDBRetrieverDocumentEmbeddingType, Union[str, List[str]]]): - def __init__(self, embedder: Embedder, dimensions: int, db_uri: str = "/tmp/lancedb", top_k: int = 5, overwrite: bool = True): +class LanceDBRetriever( + Retriever[LanceDBRetrieverDocumentEmbeddingType, Union[str, List[str]]] +): + def __init__( + self, + embedder: Embedder, + dimensions: int, + db_uri: str = "/tmp/lancedb", + top_k: int = 5, + overwrite: bool = True, + ): """ LanceDBRetriever is a retriever that leverages LanceDB to efficiently store and query document embeddings. 
@@ -39,13 +51,17 @@ def __init__(self, embedder: Embedder, dimensions: int, db_uri: str = "/tmp/lanc self.dimensions = dimensions # Define table schema with vector field for embeddings - schema = pa.schema([ - pa.field("vector", pa.list_(pa.float32(), list_size=self.dimensions)), - pa.field("content", pa.string()) - ]) + schema = pa.schema( + [ + pa.field("vector", pa.list_(pa.float32(), list_size=self.dimensions)), + pa.field("content", pa.string()), + ] + ) # Create or overwrite the table for storing documents and embeddings - self.table = self.db.create_table("documents", schema=schema, mode="overwrite" if overwrite else "append") + self.table = self.db.create_table( + "documents", schema=schema, mode="overwrite" if overwrite else "append" + ) def add_documents(self, documents: Sequence[Dict[str, Any]]): """ @@ -63,13 +79,18 @@ def add_documents(self, documents: Sequence[Dict[str, Any]]): embeddings = self.embedder(input=doc_texts).data # Format embeddings for LanceDB - data = [{"vector": embedding.embedding, "content": text} for embedding, text in zip(embeddings, doc_texts)] + data = [ + {"vector": embedding.embedding, "content": text} + for embedding, text in zip(embeddings, doc_texts) + ] # Add data to LanceDB table self.table.add(data) log.info(f"Added {len(documents)} documents to the index") - def retrieve(self, query: Union[str, List[str]], top_k: Optional[int] = None) -> List[RetrieverOutput]: + def retrieve( + self, query: Union[str, List[str]], top_k: Optional[int] = None + ) -> List[RetrieverOutput]: """. Retrieve top-k documents from LanceDB for a given query or queries. Args: @@ -83,11 +104,13 @@ def retrieve(self, query: Union[str, List[str]], top_k: Optional[int] = None) -> query = [query] if not query or (isinstance(query, str) and query.strip() == ""): - raise ValueError("Query cannot be empty.") + raise ValueError("Query cannot be empty.") # Check if table (index) exists before performing search if not self.table: - raise ValueError("The index has not been initialized or the table is missing.") + raise ValueError( + "The index has not been initialized or the table is missing." 
+ ) query_embeddings = self.embedder(input=query).data output: List[RetrieverOutput] = [] @@ -105,9 +128,11 @@ def retrieve(self, query: Union[str, List[str]], top_k: Optional[int] = None) -> scores = results["_distance"].tolist() # Append results to output - output.append(RetrieverOutput( - doc_indices=indices, - doc_scores=scores, - query=query[0] if len(query) == 1 else query - )) + output.append( + RetrieverOutput( + doc_indices=indices, + doc_scores=scores, + query=query[0] if len(query) == 1 else query, + ) + ) return output diff --git a/adalflow/adalflow/utils/lazy_import.py b/adalflow/adalflow/utils/lazy_import.py index 831e9906..16ad8d1f 100644 --- a/adalflow/adalflow/utils/lazy_import.py +++ b/adalflow/adalflow/utils/lazy_import.py @@ -75,7 +75,7 @@ class OptionalPackages(Enum): ) LANCEDB = ( - "lancedb", + "lancedb", "Please install lancedb with: pip install lancedb .", ) diff --git a/adalflow/tests/test_lancedb_retriver.py b/adalflow/tests/test_lancedb_retriver.py index 59de2c82..874bb090 100644 --- a/adalflow/tests/test_lancedb_retriver.py +++ b/adalflow/tests/test_lancedb_retriver.py @@ -7,10 +7,12 @@ from unittest import mock from adalflow.core.types import EmbedderOutput, RetrieverOutput + # Helper function to create dummy embeddings def create_dummy_embeddings(num_embeddings, dim): return np.random.rand(num_embeddings, dim).astype(np.float32) + class TestLanceDBRetriever(unittest.TestCase): def setUp(self): self.dimensions = 128 @@ -21,7 +23,10 @@ def setUp(self): # Mock embedder to return dummy embeddings self.dummy_embeddings = create_dummy_embeddings(10, self.dimensions) self.embedder.return_value = EmbedderOutput( - data=[Mock(embedding=emb) for emb in self.dummy_embeddings[:len(self.single_query)]] + data=[ + Mock(embedding=emb) + for emb in self.dummy_embeddings[: len(self.single_query)] + ] ) with patch("lancedb.connect") as mock_db_connect: @@ -32,7 +37,7 @@ def setUp(self): embedder=self.embedder, dimensions=self.dimensions, db_uri="/tmp/lancedb", - top_k=self.top_k + top_k=self.top_k, ) def test_initialization(self): @@ -68,11 +73,10 @@ def test_retrieve_single_query(self): ) # Mock search results from LanceDB as pandas DataFrame - results_df = pd.DataFrame({ - "index": [0, 1, 2], - "_distance": [0.1, 0.2, 0.3] - }) - self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = results_df + results_df = pd.DataFrame({"index": [0, 1, 2], "_distance": [0.1, 0.2, 0.3]}) + self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = ( + results_df + ) result = self.retriever.retrieve(query) self.assertIsInstance(result[0], RetrieverOutput) @@ -91,11 +95,10 @@ def test_retrieve_multiple_queries(self): ) # Mock search results for each query - results_df = pd.DataFrame({ - "index": [0, 1, 2], - "_distance": [0.1, 0.2, 0.3] - }) - self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = results_df + results_df = pd.DataFrame({"index": [0, 1, 2], "_distance": [0.1, 0.2, 0.3]}) + self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = ( + results_df + ) result = self.retriever.retrieve(queries) self.assertEqual(len(result), len(queries)) @@ -106,10 +109,9 @@ def test_retrieve_multiple_queries(self): def test_retrieve_with_empty_query(self): # Mock the empty results DataFrame - self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = pd.DataFrame({ - "index": [], - "_distance": [] - }) + self.mock_table.search.return_value.limit.return_value.to_pandas.return_value = 
pd.DataFrame( + {"index": [], "_distance": []} + ) def test_retrieve_with_no_index(self): empty_retriever = LanceDBRetriever( @@ -128,12 +130,10 @@ def test_overwrite_table_on_initialization(self): embedder=self.embedder, dimensions=self.dimensions, db_uri="/tmp/lancedb", - overwrite=True + overwrite=True, ) mock_db.create_table.assert_called_once_with( - "documents", - schema=mock.ANY, - mode="overwrite" + "documents", schema=mock.ANY, mode="overwrite" ) diff --git a/docs/poetry.lock b/docs/poetry.lock index 9c079273..c601dabf 100644 --- a/docs/poetry.lock +++ b/docs/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -20,8 +20,8 @@ tests = ["hypothesis", "pytest"] [[package]] name = "adalflow" -version = "0.2.3" -description = "The Library to Build and Auto-optimize Any LLM Task Pipeline" +version = "0.2.6" +description = "The Library to Build and Auto-optimize LLM Applications" optional = false python-versions = ">=3.9, <4.0" files = [] @@ -62,108 +62,109 @@ url = "../adalflow" [[package]] name = "aiohappyeyeballs" -version = "2.3.5" +version = "2.4.4" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, + {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, + {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, ] [[package]] name = "aiohttp" -version = "3.10.2" +version = "3.11.10" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:95213b3d79c7e387144e9cb7b9d2809092d6ff2c044cb59033aedc612f38fb6d"}, - {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1aa005f060aff7124cfadaa2493f00a4e28ed41b232add5869e129a2e395935a"}, - {file = "aiohttp-3.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eabe6bf4c199687592f5de4ccd383945f485779c7ffb62a9b9f1f8a3f9756df8"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e010736fc16d21125c7e2dc5c350cd43c528b85085c04bf73a77be328fe944"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99f81f9c1529fd8e03be4a7bd7df32d14b4f856e90ef6e9cbad3415dbfa9166c"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d611d1a01c25277bcdea06879afbc11472e33ce842322496b211319aa95441bb"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00191d38156e09e8c81ef3d75c0d70d4f209b8381e71622165f22ef7da6f101"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74c091a5ded6cb81785de2d7a8ab703731f26de910dbe0f3934eabef4ae417cc"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:18186a80ec5a701816adbf1d779926e1069392cf18504528d6e52e14b5920525"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5a7ceb2a0d2280f23a02c64cd0afdc922079bb950400c3dd13a1ab2988428aac"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8bd7be6ff6c162a60cb8fce65ee879a684fbb63d5466aba3fa5b9288eb04aefa"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fae962b62944eaebff4f4fddcf1a69de919e7b967136a318533d82d93c3c6bd1"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a0fde16d284efcacbe15fb0c1013f0967b6c3e379649239d783868230bf1db42"}, - {file = "aiohttp-3.10.2-cp310-cp310-win32.whl", hash = "sha256:f81cd85a0e76ec7b8e2b6636fe02952d35befda4196b8c88f3cec5b4fb512839"}, - {file = "aiohttp-3.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:54ba10eb5a3481c28282eb6afb5f709aedf53cf9c3a31875ffbdc9fc719ffd67"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:87fab7f948e407444c2f57088286e00e2ed0003ceaf3d8f8cc0f60544ba61d91"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6ad66ed660d46503243cbec7b2b3d8ddfa020f984209b3b8ef7d98ce69c3f2"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4be88807283bd96ae7b8e401abde4ca0bab597ba73b5e9a2d98f36d451e9aac"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01c98041f90927c2cbd72c22a164bb816fa3010a047d264969cf82e1d4bcf8d1"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54e36c67e1a9273ecafab18d6693da0fb5ac48fd48417e4548ac24a918c20998"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7de3ddb6f424af54535424082a1b5d1ae8caf8256ebd445be68c31c662354720"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd9c7db94b4692b827ce51dcee597d61a0e4f4661162424faf65106775b40e7"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e57e21e1167705f8482ca29cc5d02702208d8bf4aff58f766d94bcd6ead838cd"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a1a50e59b720060c29e2951fd9f13c01e1ea9492e5a527b92cfe04dd64453c16"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:686c87782481fda5ee6ba572d912a5c26d9f98cc5c243ebd03f95222af3f1b0f"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:dafb4abb257c0ed56dc36f4e928a7341b34b1379bd87e5a15ce5d883c2c90574"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:494a6f77560e02bd7d1ab579fdf8192390567fc96a603f21370f6e63690b7f3d"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6fe8503b1b917508cc68bf44dae28823ac05e9f091021e0c41f806ebbb23f92f"}, - {file = "aiohttp-3.10.2-cp311-cp311-win32.whl", hash = "sha256:4ddb43d06ce786221c0dfd3c91b4892c318eaa36b903f7c4278e7e2fa0dd5102"}, - {file = "aiohttp-3.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:ca2f5abcb0a9a47e56bac173c01e9f6c6e7f27534d91451c5f22e6a35a5a2093"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:14eb6b17f6246959fb0b035d4f4ae52caa870c4edfb6170aad14c0de5bfbf478"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:465e445ec348d4e4bd349edd8b22db75f025da9d7b6dc1369c48e7935b85581e"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:341f8ece0276a828d95b70cd265d20e257f5132b46bf77d759d7f4e0443f2906"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01fbb87b5426381cd9418b3ddcf4fc107e296fa2d3446c18ce6c76642f340a3"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c474af073e1a6763e1c5522bbb2d85ff8318197e4c6c919b8d7886e16213345"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9076810a5621236e29b2204e67a68e1fe317c8727ee4c9abbfbb1083b442c38"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f515d6859e673940e08de3922b9c4a2249653b0ac181169313bd6e4b1978ac"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:655e583afc639bef06f3b2446972c1726007a21003cd0ef57116a123e44601bc"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8da9449a575133828cc99985536552ea2dcd690e848f9d41b48d8853a149a959"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19073d57d0feb1865d12361e2a1f5a49cb764bf81a4024a3b608ab521568093a"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c8e98e1845805f184d91fda6f9ab93d7c7b0dddf1c07e0255924bfdb151a8d05"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:377220a5efde6f9497c5b74649b8c261d3cce8a84cb661be2ed8099a2196400a"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92f7f4a4dc9cdb5980973a74d43cdbb16286dacf8d1896b6c3023b8ba8436f8e"}, - {file = "aiohttp-3.10.2-cp312-cp312-win32.whl", hash = "sha256:9bb2834a6f11d65374ce97d366d6311a9155ef92c4f0cee543b2155d06dc921f"}, - {file = "aiohttp-3.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:518dc3cb37365255708283d1c1c54485bbacccd84f0a0fb87ed8917ba45eda5b"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7f98e70bbbf693086efe4b86d381efad8edac040b8ad02821453083d15ec315f"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f6f0b252a009e98fe84028a4ec48396a948e7a65b8be06ccfc6ef68cf1f614d"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9360e3ffc7b23565600e729e8c639c3c50d5520e05fdf94aa2bd859eef12c407"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3988044d1635c7821dd44f0edfbe47e9875427464e59d548aece447f8c22800a"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a9d59da1543a6f1478c3436fd49ec59be3868bca561a33778b4391005e499d"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f49bdb94809ac56e09a310a62f33e5f22973d6fd351aac72a39cd551e98194"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfd2dca3f11c365d6857a07e7d12985afc59798458a2fdb2ffa4a0332a3fd43"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c1508ec97b2cd3e120bfe309a4ff8e852e8a7460f1ef1de00c2c0ed01e33c"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:49904f38667c44c041a0b44c474b3ae36948d16a0398a8f8cd84e2bb3c42a069"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:352f3a4e5f11f3241a49b6a48bc5b935fabc35d1165fa0d87f3ca99c1fcca98b"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:fc61f39b534c5d5903490478a0dd349df397d2284a939aa3cbaa2fb7a19b8397"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ad2274e707be37420d0b6c3d26a8115295fe9d8e6e530fa6a42487a8ca3ad052"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c836bf3c7512100219fe1123743fd8dd9a2b50dd7cfb0c3bb10d041309acab4b"}, - {file = "aiohttp-3.10.2-cp38-cp38-win32.whl", hash = "sha256:53e8898adda402be03ff164b0878abe2d884e3ea03a4701e6ad55399d84b92dc"}, - {file = "aiohttp-3.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:7cc8f65f5b22304693de05a245b6736b14cb5bc9c8a03da6e2ae9ef15f8b458f"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9dfc906d656e14004c5bc672399c1cccc10db38df2b62a13fb2b6e165a81c316"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:91b10208b222ddf655c3a3d5b727879d7163db12b634492df41a9182a76edaae"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fd16b5e1a7bdd14668cd6bde60a2a29b49147a535c74f50d8177d11b38433a7"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2bfdda4971bd79201f59adbad24ec2728875237e1c83bba5221284dbbf57bda"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69d73f869cf29e8a373127fc378014e2b17bcfbe8d89134bc6fb06a2f67f3cb3"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df59f8486507c421c0620a2c3dce81fbf1d54018dc20ff4fecdb2c106d6e6abc"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df930015db36b460aa9badbf35eccbc383f00d52d4b6f3de2ccb57d064a6ade"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:562b1153ab7f766ee6b8b357ec777a302770ad017cf18505d34f1c088fccc448"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d984db6d855de58e0fde1ef908d48fe9a634cadb3cf715962722b4da1c40619d"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:14dc3fcb0d877911d775d511eb617a486a8c48afca0a887276e63db04d3ee920"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b52a27a5c97275e254704e1049f4b96a81e67d6205f52fa37a4777d55b0e98ef"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:cd33d9de8cfd006a0d0fe85f49b4183c57e91d18ffb7e9004ce855e81928f704"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1238fc979160bc03a92fff9ad021375ff1c8799c6aacb0d8ea1b357ea40932bb"}, - {file = "aiohttp-3.10.2-cp39-cp39-win32.whl", hash = "sha256:e2f43d238eae4f0b04f58d4c0df4615697d4ca3e9f9b1963d49555a94f0f5a04"}, - {file = "aiohttp-3.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:947847f07a8f81d7b39b2d0202fd73e61962ebe17ac2d8566f260679e467da7b"}, - {file = "aiohttp-3.10.2.tar.gz", hash = "sha256:4d1f694b5d6e459352e5e925a42e05bac66655bfde44d81c59992463d2897014"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"}, + {file = 
"aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d"}, + {file = "aiohttp-3.11.10-cp310-cp310-win32.whl", hash = "sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91"}, + {file = "aiohttp-3.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5"}, + {file = 
"aiohttp-3.11.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3"}, + {file = "aiohttp-3.11.10-cp311-cp311-win32.whl", hash = "sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4"}, + {file = "aiohttp-3.11.10-cp311-cp311-win_amd64.whl", hash = "sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc"}, + {file = "aiohttp-3.11.10-cp312-cp312-win32.whl", hash = "sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985"}, + {file = "aiohttp-3.11.10-cp312-cp312-win_amd64.whl", hash = "sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408"}, + {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816"}, + {file = 
"aiohttp-3.11.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf"}, + {file = "aiohttp-3.11.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836"}, + {file = "aiohttp-3.11.10-cp313-cp313-win32.whl", hash = "sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c"}, + {file = "aiohttp-3.11.10-cp313-cp313-win_amd64.whl", hash = "sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc"}, + {file = 
"aiohttp-3.11.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4"}, + {file = "aiohttp-3.11.10-cp39-cp39-win32.whl", hash = "sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be"}, + {file = "aiohttp-3.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74"}, + {file = "aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e"}, ] [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] @@ -231,65 +232,65 @@ vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" -version = "4.4.0" +version = "4.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = 
"async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "azure-core" -version = "1.31.0" +version = "1.32.0" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_core-1.31.0-py3-none-any.whl", hash = "sha256:22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd"}, - {file = "azure_core-1.31.0.tar.gz", hash = "sha256:656a0dd61e1869b1506b7c6a3b31d62f15984b1a573d6326f6aa2f3e4123284b"}, + {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, + {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, ] [package.dependencies] @@ -302,13 +303,13 @@ aio = ["aiohttp (>=3.0)"] [[package]] name = "azure-identity" -version = "1.18.0" +version = "1.19.0" description = "Microsoft Azure Identity Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_identity-1.18.0-py3-none-any.whl", hash = "sha256:bccf6106245b49ff41d0c4cd7b72851c5a2ba3a32cef7589da246f5727f26f02"}, - {file = "azure_identity-1.18.0.tar.gz", hash = 
"sha256:f567579a65d8932fa913c76eddf3305101a15e5727a5e4aa5df649a0f553d4c3"}, + {file = "azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81"}, + {file = "azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83"}, ] [package.dependencies] @@ -320,13 +321,13 @@ typing-extensions = ">=4.0.0" [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -366,50 +367,30 @@ lxml = ["lxml"] [[package]] name = "bleach" -version = "6.1.0" +version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, + {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, + {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, ] [package.dependencies] -six = ">=1.9.0" webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - -[[package]] -name = "boto3" -version = "1.34.144" -description = "The AWS SDK for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "boto3-1.34.144-py3-none-any.whl", hash = "sha256:b8433d481d50b68a0162c0379c0dd4aabfc3d1ad901800beb5b87815997511c1"}, - {file = "boto3-1.34.144.tar.gz", hash = "sha256:2f3e88b10b8fcc5f6100a9d74cd28230edc9d4fa226d99dd40a3ab38ac213673"}, -] - -[package.dependencies] -botocore = ">=1.34.144,<1.35.0" -jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.10.0,<0.11.0" - -[package.extras] -crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] +css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "botocore" -version = "1.34.153" +version = "1.35.77" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.153-py3-none-any.whl", hash = "sha256:9fc2ad40be8c103ab9bfcb48b97b117d299d0b3a542cdd30134ee2935bee827a"}, - {file = "botocore-1.34.153.tar.gz", hash = "sha256:1634a00f996cfff67f0fd4d0ddc436bc3318b2202dfd82ad0bc11c7169694092"}, + {file = "botocore-1.35.77-py3-none-any.whl", hash = "sha256:3faa27d65841499762228902d7e215fa99a4c2fdc76c9113e1c3f339bdf685b8"}, + {file = "botocore-1.35.77.tar.gz", hash = "sha256:17b778016644e9342ca3ff2f430c1d1db0c6126e9b41a57cff52ac58e7a455e0"}, ] [package.dependencies] @@ -421,89 +402,104 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.20.11)"] +crt = ["awscrt (==0.22.0)"] [[package]] name = "cachetools" -version = "5.4.0" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -511,126 +507,145 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, 
+ {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + 
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] name = "cohere" -version = "5.6.1" +version = "5.13.3" description = "" optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "cohere-5.6.1-py3-none-any.whl", hash = "sha256:1c8bcd39a54622d64b83cafb865f102cd2565ce091b0856fd5ce11bf7169109a"}, - {file = "cohere-5.6.1.tar.gz", hash = "sha256:5d7efda64f0e512d4cc35aa04b17a6f74b3d8c175a99f2797991a7f31dfac349"}, + {file = "cohere-5.13.3-py3-none-any.whl", hash = "sha256:076c88fdd3d670b6577eb8e813a9072bf18b59648d4092c6f0263af3c27bf81f"}, + {file = "cohere-5.13.3.tar.gz", hash = "sha256:70d87e0d5ce48aaee5ba70ead5efbade226cb2a4b11bfcfb676f6a2db3642819"}, ] [package.dependencies] -boto3 = ">=1.34.0,<2.0.0" fastavro = ">=1.9.4,<2.0.0" httpx = ">=0.21.2" -httpx-sse = ">=0.4.0,<0.5.0" +httpx-sse = "0.4.0" +numpy = ">=1.26,<2.0" parameterized = ">=0.9.0,<0.10.0" pydantic = ">=1.9.2" +pydantic-core = ">=2.18.2,<3.0.0" requests = ">=2.0.0,<3.0.0" tokenizers = ">=0.15,<1" types-requests = ">=2.0.0,<3.0.0" typing_extensions = ">=4.0.0" +[package.extras] +aws = ["boto3 (>=1.34.0,<2.0.0)", "sagemaker (>=2.232.1,<3.0.0)"] + [[package]] name = "colorama" version = "0.4.6" @@ -644,38 +659,38 @@ files = [ [[package]] name = "cryptography" -version = "43.0.1" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -688,32 +703,31 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "datasets" -version = "2.20.0" +version = "2.21.0" description = "HuggingFace community-driven open-source library of datasets" optional = false python-versions = ">=3.8.0" files = [ - {file = "datasets-2.20.0-py3-none-any.whl", hash = "sha256:76ac02e3bdfff824492e20678f0b6b1b6d080515957fe834b00c2ba8d6b18e5e"}, - {file = "datasets-2.20.0.tar.gz", hash = "sha256:3c4dbcd27e0f642b9d41d20ff2efa721a5e04b32b2ca4009e0fc9139e324553f"}, + {file = "datasets-2.21.0-py3-none-any.whl", hash = "sha256:25e4e097110ce28824b746a107727ada94024cba11db8bc588d468414692b65a"}, + {file = "datasets-2.21.0.tar.gz", hash = "sha256:998f85a8460f1bd982e5bd058f8a0808eef424249e3df1e8cdd594ccd0dc8ba2"}, ] [package.dependencies] aiohttp = "*" dill = ">=0.3.0,<0.3.9" filelock = "*" -fsspec = {version = ">=2023.1.0,<=2024.5.0", extras = ["http"]} +fsspec = {version = ">=2023.1.0,<=2024.6.1", extras = ["http"]} huggingface-hub = ">=0.21.2" multiprocess = "*" numpy = ">=1.17" packaging = "*" pandas = "*" pyarrow = ">=15.0.0" -pyarrow-hotfix = "*" pyyaml = ">=5.1" requests = ">=2.32.2" tqdm = ">=4.66.3" @@ -721,20 +735,32 @@ xxhash = "*" [package.extras] apache-beam = ["apache-beam (>=2.26.0)"] -audio = ["librosa", "soundfile (>=0.12.1)"] +audio = ["librosa", "soundfile (>=0.12.1)", "soxr (>=0.4.0)"] benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"] -dev = ["Pillow (>=9.4.0)", "absl-py", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", 
"sqlalchemy", "tensorflow (>=2.6.0)", "tiktoken", "torch", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] +dev = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tensorflow (>=2.16.0)", "tensorflow (>=2.6.0)", "tensorflow (>=2.6.0)", "tiktoken", "torch", "torch (>=2.0.0)", "transformers", "transformers (>=4.42.0)", "typing-extensions (>=4.6.1)", "zstandard"] docs = ["s3fs", "tensorflow (>=2.6.0)", "torch", "transformers"] jax = ["jax (>=0.3.14)", "jaxlib (>=0.3.14)"] -metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] +metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk (<3.8.2)", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] quality = ["ruff (>=0.3.0)"] s3 = ["s3fs"] tensorflow = ["tensorflow (>=2.6.0)"] tensorflow-gpu = ["tensorflow (>=2.6.0)"] -tests = ["Pillow (>=9.4.0)", "absl-py", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.6.0)", "tiktoken", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] +tests = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tensorflow (>=2.16.0)", "tensorflow (>=2.6.0)", "tiktoken", "torch (>=2.0.0)", "transformers (>=4.42.0)", "typing-extensions (>=4.6.1)", "zstandard"] +tests-numpy2 = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tiktoken", "torch (>=2.0.0)", "typing-extensions (>=4.6.1)", "zstandard"] torch = ["torch"] vision = ["Pillow (>=9.4.0)"] +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false 
+python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "defusedxml" version = "0.7.1" @@ -746,6 +772,20 @@ files = [ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + [[package]] name = "dill" version = "0.3.8" @@ -810,81 +850,81 @@ test = ["pytest (>=6)"] [[package]] name = "faiss-cpu" -version = "1.8.0.post1" +version = "1.9.0.post1" description = "A library for efficient similarity search and clustering of dense vectors." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:fd84721eb599aa1da19b1b36345bb8705a60bb1d2887bbbc395a29e3d36a1a62"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b78ff9079d15fd0f156bf5dd8a2975a8abffac1854a86ece263eec1500a2e836"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de25c943d1789e35fe06a20884c88cd32aedbb1a33bb8da2238cdea7bd9633f"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adae0f1b144e7216da696f14bc4991ca4300c94baaa59247c3d322588e661c95"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:00345290680a444a4b4cb2d98a3844bb5c401a2160fee547c7631d759fd2ec3e"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:8d4bade10cb63e9f9ff261751edd7eb097b1f4bf30be4d0d25d6f688559d795e"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20bd43eca3b7d77e71ea56b7a558cc28e900d8abff417eb285e2d92e95d934d4"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8542a87743a7f94ac656fd3e9592ad57e58b04d961ad2fe654a22a8ca59defdb"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed46928de3dc20170b10fec89c54075a11383c2aaf4f119c63e0f6ae5a507d74"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:4fa5fc8ea210b919aa469e27d6687e50052db906e7fec3f2257178b1384fa18b"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:96aec0d08a3099883af3a9b6356cfe736e8bd879318a940a27e9d1ae6f33d788"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:92b06147fa84732ecdc965922e8ef50dc7011ef8be65821ff4abb2118cb5dce0"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:709ef9394d1148aef70dbe890edbde8c282a4a2e06a8b69ab64f65e90f5ba572"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:327a9c30971bf72cd8392b15eb4aff5d898c453212eae656dfaa3ba555b9ca0c"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-win_amd64.whl", hash = "sha256:8756f1d93faba56349883fa2f5d47fe36bb2f11f789200c6b1c691ef805485f2"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f4a3045909c447bf1955b70083891e80f2c87c5427f20cae25245e08ec5c9e52"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8842b7fc921ca1fafdb0845f2ba029e79df04eebae72ab135239f93478a9b7a2"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d5a9799634e32c3862d5436d1e78112ed9a38f319e4523f5916e55d86adda8f"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a70923b0fbbb40f647e20bcbcbfd472277e6d84bb23ff12d2a94b6841806b55"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-win_amd64.whl", hash = "sha256:ce652df3c4dd50c88ac9235d072f30ce60694dc422c5f523bbbcab320e8f3097"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:83ef04b17b19189dd6601a941bdf4bfa9de0740dbcd80305aeba51a1b1955f80"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c50c8697077470ede7f1939ef8dc8a846ec19cf1893b543f6b67f9af03b0a122"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ce428a7a67fe5c64047280e5e12a8dbdecf7002f9d127b26cf1db354e9fe76"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3b36b80380bae523e3198cfb4a137867055945ce7bf10d18fe9f0284f2fb47"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:4fcc67a2353f08a20c1ab955de3cde14ef3b447761b26244a5aa849c15cbc9b3"}, - {file = "faiss_cpu-1.8.0.post1.tar.gz", hash = "sha256:5686af34414678c3d49c4fa8d774df7156e9cb48d7029071e56230e74b01cc13"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e18602465f5a96c3c973ab440f9263a0881034fb54810be20bc8cdb8b069456d"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5dddeecdb68fb95b4a3343a6ff89498fd7c222726706538f360132bfe3d8aebe"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d2d7e522e6d55dbf14e57fcac1d38d62c95479b847562004f9e7c97c139ee8"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ffbbb1ec9ae503df1fcdfd5c3a8594d8b76fb4b8ebf0a697c1492f1f9cec1a"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:29cae0dfa6c286c043d45572a39288f5a56ffb694a20a90c6946018241002d90"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:7ef0c81a798a64fc932e15d560ddc01021df9ed70b678367aec6e01f39d075c1"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:783f545c3999909164a975b97d99749b244b62651ce976ee76b8a171c62e827d"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0408261ed85d0bd8e30716a3fd441d0c51a5563cf3a795a488eab9c492ea33"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7068e14e8f557659c68bdf4d511571630721e1502efa87a70fe44023f3741645"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:274a66868a498687641faf964f6eddbe70ccb5bee56239862ee0aa079415779e"}, + 
{file = "faiss_cpu-1.9.0.post1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:ae3fbe0f26c05bef26c626f9e293cc4dd0e685ec02d64100c686276a8c14bf88"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b4d5e79643a09d91d339ba7609fb2e9b3ce6de3cd069b9183e97a843261e0e8"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd1a0412528202e4a4cc38953f81bb7d9b9a783881fa06d822b717a1b090bdd"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4a499aa20b00266c78b9768de962e6a8dd2e2b2eb3d02aa4c41af4c6913eeba"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-win_amd64.whl", hash = "sha256:d6920f2db8581eb6dcd519c024120061d7d68bc075d494e59b1b2af9a1729d03"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:10e38642c5f147642c4aa8a6c1704fb1900b2b8dd5f33b49a45fa5a67df4837d"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ec25338fc06fa8aa6ef5c7a2ba9f1aa03f64f9b38ba82402a6495cc981426571"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2951be3d2713a128e7f625a4b508419238b6c09cce747a0de7708bdcf1b7e3d6"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6467aafa148d39e6e9bc26c1d84e07f16cbf910297a90ec2e8597cf69772a82"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-win_amd64.whl", hash = "sha256:87a224a01a4ad80e0f849b2b2b1fba8b197e5803416ea861faf1b0de255871ea"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a88ebc13a6827b2520eddd5f1c948f377c34cc07571ce6e4c0e6af0696a77288"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0241d7441c291a64a26f8ae3d9c23adccb392524a4a5555b32aff08c7c1766f2"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aafa02b77e9c94b858cf86bc69bfa72a3754b5cfe8a0e9c1c70c6cf5c8c6b0a6"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba6e57971d7b112eb372d805a809b36573f50c10a08a7ecc97e4039ec369a1f6"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:b4eeb44949805d4a88de507636b01382da0527280a64ecb99bc4eb596a1a81e5"}, + {file = "faiss_cpu-1.9.0.post1.tar.gz", hash = "sha256:920725d485aab05dd87d34ef63257332441e9b53d382069f034996465827143a"}, ] [package.dependencies] -numpy = ">=1.0,<2.0" +numpy = ">=1.25.0,<3.0" packaging = "*" [[package]] name = "fastavro" -version = "1.9.5" +version = "1.9.7" description = "Fast read/write of AVRO files" optional = false python-versions = ">=3.8" files = [ - {file = "fastavro-1.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:61253148e95dd2b6457247b441b7555074a55de17aef85f5165bfd5facf600fc"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b604935d671ad47d888efc92a106f98e9440874108b444ac10e28d643109c937"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0adbf4956fd53bd74c41e7855bb45ccce953e0eb0e44f5836d8d54ad843f9944"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:53d838e31457db8bf44460c244543f75ed307935d5fc1d93bc631cc7caef2082"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:07b6288e8681eede16ff077632c47395d4925c2f51545cd7a60f194454db2211"}, - {file = "fastavro-1.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:ef08cf247fdfd61286ac0c41854f7194f2ad05088066a756423d7299b688d975"}, - {file = "fastavro-1.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c52d7bb69f617c90935a3e56feb2c34d4276819a5c477c466c6c08c224a10409"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e05969956003df8fa4491614bc62fe40cec59e94d06e8aaa8d8256ee3aab82"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06e6df8527493a9f0d9a8778df82bab8b1aa6d80d1b004e5aec0a31dc4dc501c"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27820da3b17bc01cebb6d1687c9d7254b16d149ef458871aaa207ed8950f3ae6"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:195a5b8e33eb89a1a9b63fa9dce7a77d41b3b0cd785bac6044df619f120361a2"}, - {file = "fastavro-1.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:be612c109efb727bfd36d4d7ed28eb8e0506617b7dbe746463ebbf81e85eaa6b"}, - {file = "fastavro-1.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b133456c8975ec7d2a99e16a7e68e896e45c821b852675eac4ee25364b999c14"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf586373c3d1748cac849395aad70c198ee39295f92e7c22c75757b5c0300fbe"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:724ef192bc9c55d5b4c7df007f56a46a21809463499856349d4580a55e2b914c"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bfd11fe355a8f9c0416803afac298960eb4c603a23b1c74ff9c1d3e673ea7185"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9827d1654d7bcb118ef5efd3e5b2c9ab2a48d44dac5e8c6a2327bc3ac3caa828"}, - {file = "fastavro-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:d84b69dca296667e6137ae7c9a96d060123adbc0c00532cc47012b64d38b47e9"}, - {file = "fastavro-1.9.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:fb744e9de40fb1dc75354098c8db7da7636cba50a40f7bef3b3fb20f8d189d88"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:240df8bacd13ff5487f2465604c007d686a566df5cbc01d0550684eaf8ff014a"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3bb35c25bbc3904e1c02333bc1ae0173e0a44aa37a8e95d07e681601246e1f1"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b47a54a9700de3eabefd36dabfb237808acae47bc873cada6be6990ef6b165aa"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:48c7b5e6d2f3bf7917af301c275b05c5be3dd40bb04e80979c9e7a2ab31a00d1"}, - {file = "fastavro-1.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:05d13f98d4e325be40387e27da9bd60239968862fe12769258225c62ec906f04"}, - {file = "fastavro-1.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5b47948eb196263f6111bf34e1cd08d55529d4ed46eb50c1bc8c7c30a8d18868"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85b7a66ad521298ad9373dfe1897a6ccfc38feab54a47b97922e213ae5ad8870"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44cb154f863ad80e41aea72a709b12e1533b8728c89b9b1348af91a6154ab2f5"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:b5f7f2b1fe21231fd01f1a2a90e714ae267fe633cd7ce930c0aea33d1c9f4901"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88fbbe16c61d90a89d78baeb5a34dc1c63a27b115adccdbd6b1fb6f787deacf2"}, - {file = "fastavro-1.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:753f5eedeb5ca86004e23a9ce9b41c5f25eb64a876f95edcc33558090a7f3e4b"}, - {file = "fastavro-1.9.5.tar.gz", hash = "sha256:6419ebf45f88132a9945c51fe555d4f10bb97c236288ed01894f957c6f914553"}, + {file = "fastavro-1.9.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc811fb4f7b5ae95f969cda910241ceacf82e53014c7c7224df6f6e0ca97f52f"}, + {file = "fastavro-1.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb8749e419a85f251bf1ac87d463311874972554d25d4a0b19f6bdc56036d7cf"}, + {file = "fastavro-1.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b2f9bafa167cb4d1c3dd17565cb5bf3d8c0759e42620280d1760f1e778e07fc"}, + {file = "fastavro-1.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e87d04b235b29f7774d226b120da2ca4e60b9e6fdf6747daef7f13f218b3517a"}, + {file = "fastavro-1.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b525c363e267ed11810aaad8fbdbd1c3bd8837d05f7360977d72a65ab8c6e1fa"}, + {file = "fastavro-1.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:6312fa99deecc319820216b5e1b1bd2d7ebb7d6f221373c74acfddaee64e8e60"}, + {file = "fastavro-1.9.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ec8499dc276c2d2ef0a68c0f1ad11782b2b956a921790a36bf4c18df2b8d4020"}, + {file = "fastavro-1.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d9d96f98052615ab465c63ba8b76ed59baf2e3341b7b169058db104cbe2aa0"}, + {file = "fastavro-1.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919f3549e07a8a8645a2146f23905955c35264ac809f6c2ac18142bc5b9b6022"}, + {file = "fastavro-1.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9de1fa832a4d9016724cd6facab8034dc90d820b71a5d57c7e9830ffe90f31e4"}, + {file = "fastavro-1.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1d09227d1f48f13281bd5ceac958650805aef9a4ef4f95810128c1f9be1df736"}, + {file = "fastavro-1.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:2db993ae6cdc63e25eadf9f93c9e8036f9b097a3e61d19dca42536dcc5c4d8b3"}, + {file = "fastavro-1.9.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4e1289b731214a7315884c74b2ec058b6e84380ce9b18b8af5d387e64b18fc44"}, + {file = "fastavro-1.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac69666270a76a3a1d0444f39752061195e79e146271a568777048ffbd91a27"}, + {file = "fastavro-1.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9be089be8c00f68e343bbc64ca6d9a13e5e5b0ba8aa52bcb231a762484fb270e"}, + {file = "fastavro-1.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d576eccfd60a18ffa028259500df67d338b93562c6700e10ef68bbd88e499731"}, + {file = "fastavro-1.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee9bf23c157bd7dcc91ea2c700fa3bd924d9ec198bb428ff0b47fa37fe160659"}, + {file = "fastavro-1.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:b6b2ccdc78f6afc18c52e403ee68c00478da12142815c1bd8a00973138a166d0"}, + {file = "fastavro-1.9.7-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7313def3aea3dacface0a8b83f6d66e49a311149aa925c89184a06c1ef99785d"}, + {file = "fastavro-1.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:536f5644737ad21d18af97d909dba099b9e7118c237be7e4bd087c7abde7e4f0"}, + {file = "fastavro-1.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2af559f30383b79cf7d020a6b644c42ffaed3595f775fe8f3d7f80b1c43dfdc5"}, + {file = "fastavro-1.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:edc28ab305e3c424de5ac5eb87b48d1e07eddb6aa08ef5948fcda33cc4d995ce"}, + {file = "fastavro-1.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ec2e96bdabd58427fe683329b3d79f42c7b4f4ff6b3644664a345a655ac2c0a1"}, + {file = "fastavro-1.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:3b683693c8a85ede496ebebe115be5d7870c150986e34a0442a20d88d7771224"}, + {file = "fastavro-1.9.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:58f76a5c9a312fbd37b84e49d08eb23094d36e10d43bc5df5187bc04af463feb"}, + {file = "fastavro-1.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56304401d2f4f69f5b498bdd1552c13ef9a644d522d5de0dc1d789cf82f47f73"}, + {file = "fastavro-1.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fcce036c6aa06269fc6a0428050fcb6255189997f5e1a728fc461e8b9d3e26b"}, + {file = "fastavro-1.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:17de68aae8c2525f5631d80f2b447a53395cdc49134f51b0329a5497277fc2d2"}, + {file = "fastavro-1.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7c911366c625d0a997eafe0aa83ffbc6fd00d8fd4543cb39a97c6f3b8120ea87"}, + {file = "fastavro-1.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:912283ed48578a103f523817fdf0c19b1755cea9b4a6387b73c79ecb8f8f84fc"}, + {file = "fastavro-1.9.7.tar.gz", hash = "sha256:13e11c6cb28626da85290933027cd419ce3f9ab8e45410ef24ce6b89d20a1f6c"}, ] [package.extras] @@ -895,13 +935,13 @@ zstandard = ["zstandard"] [[package]] name = "fastjsonschema" -version = "2.20.0" +version = "2.21.1" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, + {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, + {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, ] [package.extras] @@ -909,115 +949,130 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "frozenlist" -version = "1.4.1" +version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file 
= "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - 
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = 
"frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = 
"frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, + {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, + {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = 
"sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, ] [[package]] name = "fsspec" -version = "2024.5.0" +version = "2024.6.1" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.5.0-py3-none-any.whl", hash = "sha256:e0fdbc446d67e182f49a70b82cf7889028a63588fde6b222521f10937b2b670c"}, - {file = "fsspec-2024.5.0.tar.gz", hash = "sha256:1d021b0b0f933e3b3029ed808eb400c08ba101ca2de4b3483fbc9ca23fcee94a"}, + {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, + {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, ] [package.dependencies] @@ -1029,6 +1084,7 @@ adl = ["adlfs"] arrow = ["pyarrow (>=1)"] dask = ["dask", "distributed"] dev = ["pre-commit", "ruff"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] dropbox = ["dropbox", "dropboxdrivefs", "requests"] full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] fuse = ["fusepy"] @@ -1069,13 +1125,13 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-api-core" -version = "2.19.1" +version = "2.24.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, + {file = "google_api_core-2.24.0-py3-none-any.whl", hash = "sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9"}, + {file = "google_api_core-2.24.0.tar.gz", hash = "sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf"}, ] [package.dependencies] @@ -1089,24 +1145,28 @@ grpcio-status = [ {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] -proto-plus = ">=1.22.3,<2.0.0dev" +proto-plus = [ + {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""}, + {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""}, +] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.137.0" +version = "2.154.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.137.0-py2.py3-none-any.whl", hash = "sha256:a8b5c5724885e5be9f5368739aa0ccf416627da4ebd914b410a090c18f84d692"}, - {file = "google_api_python_client-2.137.0.tar.gz", hash = 
"sha256:e739cb74aac8258b1886cb853b0722d47c81fe07ad649d7f2206f06530513c04"}, + {file = "google_api_python_client-2.154.0-py2.py3-none-any.whl", hash = "sha256:a521bbbb2ec0ba9d6f307cdd64ed6e21eeac372d1bd7493a4ab5022941f784ad"}, + {file = "google_api_python_client-2.154.0.tar.gz", hash = "sha256:1b420062e03bfcaa1c79e2e00a612d29a6a934151ceb3d272fe150a656dc8f17"}, ] [package.dependencies] @@ -1118,13 +1178,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.32.0" +version = "2.36.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, - {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, + {file = "google_auth-2.36.0-py2.py3-none-any.whl", hash = "sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb"}, + {file = "google_auth-2.36.0.tar.gz", hash = "sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1"}, ] [package.dependencies] @@ -1134,7 +1194,7 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] @@ -1179,13 +1239,13 @@ dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "py [[package]] name = "googleapis-common-protos" -version = "1.63.2" +version = "1.66.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, - {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, + {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, + {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, ] [package.dependencies] @@ -1196,69 +1256,84 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = 
"greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = 
"sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = 
"greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -1286,76 +1361,85 @@ typing-extensions = ">=4.7,<5" [[package]] name = "grpcio" -version = "1.64.1" +version = "1.68.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, - {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, - {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = 
"sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, - {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, - {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, - {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, - {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, - {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, - {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, - {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, - {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, - {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, - {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, - {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, - {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, - {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, - {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, - {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, - {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, - {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, - {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, + {file = "grpcio-1.68.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d"}, + {file = "grpcio-1.68.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330"}, + {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079"}, + {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1"}, + {file = "grpcio-1.68.1-cp310-cp310-win32.whl", hash = "sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5"}, + {file = "grpcio-1.68.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746"}, + {file = "grpcio-1.68.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c"}, + {file = 
"grpcio-1.68.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600"}, + {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe"}, + {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0"}, + {file = "grpcio-1.68.1-cp311-cp311-win32.whl", hash = "sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9"}, + {file = "grpcio-1.68.1-cp311-cp311-win_amd64.whl", hash = "sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2"}, + {file = "grpcio-1.68.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666"}, + {file = "grpcio-1.68.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60"}, + {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475"}, + {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613"}, + {file = "grpcio-1.68.1-cp312-cp312-win32.whl", hash = "sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5"}, + {file = "grpcio-1.68.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c"}, + {file = "grpcio-1.68.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385"}, + {file = "grpcio-1.68.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a"}, + {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161"}, + {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad"}, + {file = "grpcio-1.68.1-cp313-cp313-win32.whl", hash = "sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172"}, + {file = "grpcio-1.68.1-cp313-cp313-win_amd64.whl", hash = "sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e"}, + {file = "grpcio-1.68.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad"}, + {file = "grpcio-1.68.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e"}, + {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c"}, + {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded"}, + {file = "grpcio-1.68.1-cp38-cp38-win32.whl", hash = "sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63"}, + {file = "grpcio-1.68.1-cp38-cp38-win_amd64.whl", hash = "sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d"}, + {file = "grpcio-1.68.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a"}, + {file = "grpcio-1.68.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0"}, + {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e"}, + {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43"}, + {file = "grpcio-1.68.1-cp39-cp39-win32.whl", hash = "sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76"}, + {file = "grpcio-1.68.1-cp39-cp39-win_amd64.whl", hash = "sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1"}, + {file = "grpcio-1.68.1.tar.gz", hash = "sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.64.1)"] +protobuf = ["grpcio-tools (>=1.68.1)"] [[package]] name = "grpcio-status" -version = "1.62.2" +version = "1.62.3" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.62.2.tar.gz", hash = "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a"}, - {file = "grpcio_status-1.62.2-py3-none-any.whl", hash = 
"sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f"}, + {file = "grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"}, + {file = "grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.2" +grpcio = ">=1.62.3" protobuf = ">=4.21.6" [[package]] @@ -1459,13 +1543,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -1476,7 +1560,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httplib2" @@ -1494,13 +1578,13 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1516,6 +1600,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "httpx-sse" @@ -1530,13 +1615,13 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.23.5" +version = "0.26.5" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.23.5-py3-none-any.whl", hash = "sha256:d7a7d337615e11a45cc14a0ce5a605db6b038dc24af42866f731684825226e90"}, - {file = "huggingface_hub-0.23.5.tar.gz", hash = "sha256:67a9caba79b71235be3752852ca27da86bd54311d2424ca8afdb8dda056edf98"}, + {file = "huggingface_hub-0.26.5-py3-none-any.whl", hash = "sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924"}, + {file = "huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b"}, ] [package.dependencies] @@ -1549,17 +1634,17 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", 
"pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] +inference = ["aiohttp"] +quality = ["libcst (==1.4.0)", "mypy (==1.5.1)", "ruff (>=0.5.0)"] tensorflow = ["graphviz", "pydot", "tensorflow"] tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors", "torch"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors[torch]", "torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] @@ -1575,15 +1660,18 @@ files = [ [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 
(>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "imagesize" version = "1.4.1" @@ -1597,22 +1685,26 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.0.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "jinja2" @@ -1633,72 +1725,87 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.5.0" +version = "0.8.2" description = "Fast iterable JSON parser." 
optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, - {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, - {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, - {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, - {file = "jiter-0.5.0-cp311-none-win32.whl", hash = 
"sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, - {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, - {file = "jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a"}, - {file = "jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a"}, - {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e"}, - {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338"}, - {file = "jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4"}, - {file = "jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5"}, - {file = "jiter-0.5.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f04bc2fc50dc77be9d10f73fcc4e39346402ffe21726ff41028f36e179b587e6"}, - {file = "jiter-0.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f433a4169ad22fcb550b11179bb2b4fd405de9b982601914ef448390b2954f3"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad4a6398c85d3a20067e6c69890ca01f68659da94d74c800298581724e426c7e"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6baa88334e7af3f4d7a5c66c3a63808e5efbc3698a1c57626541ddd22f8e4fbf"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ece0a115c05efca597c6d938f88c9357c843f8c245dbbb53361a1c01afd7148"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:335942557162ad372cc367ffaf93217117401bf930483b4b3ebdb1223dbddfa7"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649b0ee97a6e6da174bffcb3c8c051a5935d7d4f2f52ea1583b5b3e7822fbf14"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4be354c5de82157886ca7f5925dbda369b77344b4b4adf2723079715f823989"}, - {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5206144578831a6de278a38896864ded4ed96af66e1e63ec5dd7f4a1fce38a3a"}, - {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:8120c60f8121ac3d6f072b97ef0e71770cc72b3c23084c72c4189428b1b1d3b6"}, - {file = "jiter-0.5.0-cp38-none-win32.whl", hash = "sha256:6f1223f88b6d76b519cb033a4d3687ca157c272ec5d6015c322fc5b3074d8a5e"}, - {file = "jiter-0.5.0-cp38-none-win_amd64.whl", hash = "sha256:c59614b225d9f434ea8fc0d0bec51ef5fa8c83679afedc0433905994fb36d631"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, - {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, - {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, - {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, + {file 
= "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, + {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, + {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, + {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, + {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, + {file = 
"jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, + {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, + {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, + {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, + {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, + {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, + {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, + {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, + {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, + {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, + {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, + {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, + {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, ] [[package]] @@ -1749,13 +1856,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false -python-versions = ">=3.8" 
+python-versions = ">=3.9" files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] [package.dependencies] @@ -1763,13 +1870,13 @@ referencing = ">=0.31.0" [[package]] name = "jupyter-client" -version = "8.6.2" +version = "8.6.3" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, + {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, + {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, ] [package.dependencies] @@ -1815,73 +1922,109 @@ files = [ {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, ] +[[package]] +name = "lancedb" +version = "0.14.0" +description = "lancedb" +optional = false +python-versions = ">=3.9" +files = [ + {file = "lancedb-0.14.0-cp38-abi3-macosx_10_15_x86_64.whl", hash = "sha256:6b970e6f503464918789d76c43d70d93d85ef82dc6dbec9685483c60c36ba491"}, + {file = "lancedb-0.14.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:e28932882a0f893a295b391b05b0af9d95918e2cd10d6d58991e3282c06c0bd3"}, + {file = "lancedb-0.14.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:faef7fe76af9373656660e2e652e3d330735e84680649f0d74c558a0460f0d55"}, + {file = "lancedb-0.14.0-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:777e2d483f13814a2a5624c6824936f400aeab52b961853f1352cc21564f7d6f"}, + {file = "lancedb-0.14.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:31fec6c05edf657542d91c396b895b2ba02f0e6114188ea9bb03a3112907a71e"}, + {file = "lancedb-0.14.0-cp38-abi3-win_amd64.whl", hash = "sha256:a4e758156554e2a2a493ad569278d8f938e209f38f215924ed1c5f368d1f402e"}, +] + +[package.dependencies] +attrs = ">=21.3.0" +cachetools = "*" +deprecation = "*" +overrides = ">=0.7" +packaging = "*" +pydantic = ">=1.10" +pylance = "0.18.2" +requests = ">=2.31.0" +retry = ">=0.9.2" +tqdm = ">=4.27.0" + +[package.extras] +azure = ["adlfs (>=2024.2.0)"] +clip = ["open-clip", "pillow", "torch"] +dev = ["pre-commit", "ruff"] +docs = ["mkdocs", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings[python]"] +embeddings = ["awscli (>=1.29.57)", "boto3 (>=1.28.57)", "botocore (>=1.31.57)", "cohere", "google-generativeai", "huggingface-hub", "ibm-watsonx-ai (>=1.1.2)", "instructorembedding", "ollama", "open-clip-torch", "openai (>=1.6.1)", "pillow", "sentence-transformers", "torch"] +tests = ["aiohttp", "boto3", "duckdb", "pandas (>=1.4)", "polars (>=0.19,<=1.3.0)", "pytest", "pytest-asyncio", "pytest-mock", "pytz", "tantivy"] + [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely 
add untrusted strings to HTML/XML markup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -1914,13 +2057,13 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msal" -version = "1.31.0" +version = "1.31.1" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
optional = false python-versions = ">=3.7" files = [ - {file = "msal-1.31.0-py3-none-any.whl", hash = "sha256:96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7"}, - {file = "msal-1.31.0.tar.gz", hash = "sha256:2c4f189cf9cc8f00c80045f66d39b7c0f3ed45873fd3d1f2af9f22db2e12ff4b"}, + {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, + {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, ] [package.dependencies] @@ -1948,103 +2091,108 @@ portalocker = ">=1.4,<3" [[package]] name = "multidict" -version = "6.0.5" +version = "6.1.0" description = "multidict implementation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = 
"multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = 
"multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "multiprocess" version = "0.70.16" @@ -2071,13 +2219,13 @@ dill = ">=0.3.8" [[package]] name = "nbclient" -version = "0.10.0" +version = "0.10.1" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false python-versions = ">=3.8.0" files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, + {file = "nbclient-0.10.1-py3-none-any.whl", hash = "sha256:949019b9240d66897e442888cfb618f69ef23dc71c01cb5fced8499c2cfc084d"}, + {file = "nbclient-0.10.1.tar.gz", hash = "sha256:3e93e348ab27e712acd46fccd809139e356eb9a31aab641d1a7991a6eb4e6f68"}, ] [package.dependencies] @@ -2088,7 +2236,7 @@ traitlets = ">=5.4" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] @@ -2152,13 +2300,13 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nbsphinx" -version = "0.9.4" +version = "0.9.5" description = "Jupyter Notebook Tools for Sphinx" optional = false python-versions = ">=3.6" files = [ - {file = "nbsphinx-0.9.4-py3-none-any.whl", hash = "sha256:22cb1d974a8300e8118ca71aea1f649553743c0c5830a54129dcd446e6a8ba17"}, - {file = "nbsphinx-0.9.4.tar.gz", hash = "sha256:042a60806fc23d519bc5bef59d95570713913fe442fda759d53e3aaf62104794"}, + {file = "nbsphinx-0.9.5-py3-none-any.whl", hash = "sha256:d82f71084425db1f48e72515f15c25b4de8652ceaab513ee462ac05f1b8eae0a"}, + {file = "nbsphinx-0.9.5.tar.gz", hash = "sha256:736916e7b0dab28fc904f4a9ae3b53a9a50c29fccc6329c052fcc7485abcf2b7"}, ] [package.dependencies] @@ -2245,46 +2393,50 @@ files = [ [[package]] name = "nvidia-cublas-cu12" -version = "12.1.3.1" +version = "12.4.5.8" description = "CUBLAS native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, + {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3"}, + {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:2fc8da60df463fdefa81e323eef2e36489e1c94335b5358bcb38360adf75ac9b"}, + {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-win_amd64.whl", hash = "sha256:5a796786da89203a0657eda402bcdcec6180254a8ac22d72213abc42069522dc"}, ] [[package]] name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" +version = "12.4.127" description = "CUDA profiling tools runtime libs." optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, + {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a"}, + {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9dec60f5ac126f7bb551c055072b69d85392b13311fcc1bcda2202d172df30fb"}, + {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:5688d203301ab051449a2b1cb6690fbe90d2b372f411521c86018b950f3d7922"}, ] [[package]] name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" +version = "12.4.127" description = "NVRTC native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, + {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198"}, + {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a178759ebb095827bd30ef56598ec182b85547f1508941a3d560eb7ea1fbf338"}, + {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:a961b2f1d5f17b14867c619ceb99ef6fcec12e46612711bcec78eb05068a60ec"}, ] [[package]] name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" +version = "12.4.127" description = "CUDA Runtime native Libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, + {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3"}, + {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64403288fa2136ee8e467cdc9c9427e0434110899d07c779f25b5c068934faa5"}, + {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-win_amd64.whl", hash = 
"sha256:09c2e35f48359752dfa822c09918211844a3d93c100a715d79b59591130c5e1e"}, ] [[package]] @@ -2303,35 +2455,41 @@ nvidia-cublas-cu12 = "*" [[package]] name = "nvidia-cufft-cu12" -version = "11.0.2.54" +version = "11.2.1.3" description = "CUFFT native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, + {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399"}, + {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9"}, + {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-win_amd64.whl", hash = "sha256:d802f4954291101186078ccbe22fc285a902136f974d369540fd4a5333d1440b"}, ] +[package.dependencies] +nvidia-nvjitlink-cu12 = "*" + [[package]] name = "nvidia-curand-cu12" -version = "10.3.2.106" +version = "10.3.5.147" description = "CURAND native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, + {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9"}, + {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a88f583d4e0bb643c49743469964103aa59f7f708d862c3ddb0fc07f851e3b8b"}, + {file = "nvidia_curand_cu12-10.3.5.147-py3-none-win_amd64.whl", hash = "sha256:f307cc191f96efe9e8f05a87096abc20d08845a841889ef78cb06924437f6771"}, ] [[package]] name = "nvidia-cusolver-cu12" -version = "11.4.5.107" +version = "11.6.1.9" description = "CUDA solver native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, + {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e"}, + {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260"}, + {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-win_amd64.whl", hash = "sha256:e77314c9d7b694fcebc84f58989f3aa4fb4cb442f12ca1a9bde50f5e8f6d1b9c"}, ] [package.dependencies] @@ -2341,13 +2499,14 @@ nvidia-nvjitlink-cu12 = "*" [[package]] name = "nvidia-cusparse-cu12" -version = "12.1.0.106" +version = "12.3.1.170" description = "CUSPARSE native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = 
"nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, + {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3"}, + {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1"}, + {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-win_amd64.whl", hash = "sha256:9bc90fb087bc7b4c15641521f31c0371e9a612fc2ba12c338d3ae032e6b6797f"}, ] [package.dependencies] @@ -2355,47 +2514,47 @@ nvidia-nvjitlink-cu12 = "*" [[package]] name = "nvidia-nccl-cu12" -version = "2.20.5" +version = "2.21.5" description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, + {file = "nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8579076d30a8c24988834445f8d633c697d42397e92ffc3f63fa26766d25e0a0"}, ] [[package]] name = "nvidia-nvjitlink-cu12" -version = "12.6.20" +version = "12.4.127" description = "Nvidia JIT LTO Library" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_aarch64.whl", hash = "sha256:84fb38465a5bc7c70cbc320cfd0963eb302ee25a5e939e9f512bbba55b6072fb"}, - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl", hash = "sha256:562ab97ea2c23164823b2a89cb328d01d45cb99634b8c65fe7cd60d14562bd79"}, - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-win_amd64.whl", hash = "sha256:ed3c43a17f37b0c922a919203d2d36cbef24d41cc3e6b625182f8b58203644f6"}, + {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83"}, + {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"}, + {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:fd9020c501d27d135f983c6d3e244b197a7ccad769e34df53a42e276b0e25fa1"}, ] [[package]] name = "nvidia-nvtx-cu12" -version = "12.1.105" +version = "12.4.127" description = "NVIDIA Tools Extension" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, + {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3"}, + {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:781e950d9b9f60d8241ccea575b32f5105a5baf4c2351cab5256a24869f12a1a"}, + {file = "nvidia_nvtx_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:641dccaaa1139f3ffb0d3164b4b84f9d253397e38246a4f2f36728b48566d485"}, ] [[package]] name = "ollama" -version = "0.3.0" +version = "0.3.3" description = "The official Python client for Ollama." 
optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "ollama-0.3.0-py3-none-any.whl", hash = "sha256:cd7010c4e2a37d7f08f36cd35c4592b14f1ec0d1bf3df10342cd47963d81ad7a"}, - {file = "ollama-0.3.0.tar.gz", hash = "sha256:6ff493a2945ba76cdd6b7912a1cd79a45cfd9ba9120d14adeb63b2b5a7f353da"}, + {file = "ollama-0.3.3-py3-none-any.whl", hash = "sha256:ca6242ce78ab34758082b7392df3f9f6c2cb1d070a9dede1a4c545c929e16dba"}, + {file = "ollama-0.3.3.tar.gz", hash = "sha256:f90a6d61803117f40b0e8ff17465cab5e1eb24758a473cfe8101aff38bc13b51"}, ] [package.dependencies] @@ -2403,74 +2562,99 @@ httpx = ">=0.27.0,<0.28.0" [[package]] name = "openai" -version = "1.35.14" +version = "1.57.1" description = "The official Python library for the openai API" optional = false -python-versions = ">=3.7.1" +python-versions = ">=3.8" files = [ - {file = "openai-1.35.14-py3-none-any.whl", hash = "sha256:adadf8c176e0b8c47ad782ed45dc20ef46438ee1f02c7103c4155cff79c8f68b"}, - {file = "openai-1.35.14.tar.gz", hash = "sha256:394ba1dfd12ecec1d634c50e512d24ff1858bbc2674ffcce309b822785a058de"}, + {file = "openai-1.57.1-py3-none-any.whl", hash = "sha256:3865686c927e93492d1145938d4a24b634951531c4b2769d43ca5dbd4b25d8fd"}, + {file = "openai-1.57.1.tar.gz", hash = "sha256:a95f22e04ab3df26e64a15d958342265e802314131275908b3b3e36f8c5d4377"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tqdm = ">4" -typing-extensions = ">=4.7,<5" +typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = 
"pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] @@ -2510,12 +2694,12 @@ xml = ["lxml (>=4.9.2)"] [[package]] name = "pandoc" -version = "2.3" +version = "2.4" description = "Pandoc Documents for Python" optional = false python-versions = "*" files = [ - {file = "pandoc-2.3.tar.gz", hash = "sha256:e772c2c6d871146894579828dbaf1efd538eb64fc7e71d4a6b3a11a18baef90d"}, + {file = "pandoc-2.4.tar.gz", hash = "sha256:ecd1f8cbb7f4180c6b5db4a17a7c1a74df519995f5f186ef81ce72a9cbd0dd9a"}, ] [package.dependencies] @@ -2549,12 +2733,13 @@ dev = ["jinja2"] [[package]] name = "pgvector" -version = "0.3.1" +version = "0.3.6" description = "pgvector support for Python" optional = false python-versions = ">=3.8" files = [ - {file = "pgvector-0.3.1-py2.py3-none-any.whl", hash = "sha256:7da0629915083a9769b9a73481efb4cdc9122cfd35fc7a9248ce43d177a9c8e8"}, + {file = "pgvector-0.3.6-py3-none-any.whl", hash = "sha256:f6c269b3c110ccb7496bac87202148ed18f34b390a0189c783e351062400a75a"}, + {file = "pgvector-0.3.6.tar.gz", hash = "sha256:31d01690e6ea26cea8a633cde5f0f55f5b246d9c8292d68efdef8c22ec994ade"}, ] [package.dependencies] @@ -2562,38 +2747,39 @@ numpy = "*" [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "plumbum" -version = "1.8.3" +version = "1.9.0" description = "Plumbum: shell combinators library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "plumbum-1.8.3-py3-none-any.whl", hash = "sha256:8595d36dae2472587d6f59789c8d7b26250f45f6f6ed75ccb378de59ee7b9cf9"}, - {file = "plumbum-1.8.3.tar.gz", hash = "sha256:6092c85ab970b7a7a9d5d85c75200bc93be82b33c9bdf640ffa87d2d7c8709f0"}, + {file = "plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5"}, + {file = "plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219"}, ] [package.dependencies] pywin32 = {version = "*", markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""} [package.extras] -dev = ["paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] +dev = ["coverage[toml]", "paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] docs = ["sphinx (>=4.0.0)", "sphinx-rtd-theme (>=1.0.0)"] ssh = ["paramiko"] +test = ["coverage[toml]", "paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] [[package]] name = "ply" @@ -2625,15 +2811,106 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +[[package]] +name = "propcache" +version = "0.2.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +files = [ + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"}, + {file = 
"propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"}, + {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"}, + {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"}, + {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"}, + {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518"}, + {file = "propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246"}, + {file = "propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30"}, + {file = "propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6"}, + {file = "propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"}, + {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"}, + {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"}, + {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"}, + {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"}, +] + [[package]] name = "proto-plus" -version = "1.24.0" +version = "1.25.0" description = "Beautiful, Pythonic protocol buffers." optional = false python-versions = ">=3.7" files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, + {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"}, + {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"}, ] [package.dependencies] @@ -2644,106 +2921,109 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.3" +version = "4.25.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = 
"sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] [[package]] -name = "pyarrow" -version = "17.0.0" -description = "Python library for Apache Arrow" +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, - {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, - {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, - {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, - {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, - {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, - {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, - {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, - {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, - {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -[package.dependencies] -numpy = ">=1.16.6" - -[package.extras] -test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] - [[package]] -name = "pyarrow-hotfix" -version = "0.6" -description = "" +name = "pyarrow" +version = "18.1.0" +description = "Python library for Apache Arrow" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"}, - {file = "pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56"}, + {file = "pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0"}, + {file = "pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = 
"sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30"}, + {file = "pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c"}, + {file = "pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:0b331e477e40f07238adc7ba7469c36b908f07c89b95dd4bd3a0ec84a3d1e21e"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c4dd0c9010a25ba03e198fe743b1cc03cd33c08190afff371749c52ccbbaf76"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f97b31b4c4e21ff58c6f330235ff893cc81e23da081b1a4b1c982075e0ed4e9"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:4a4813cb8ecf1809871fd2d64a8eff740a1bd3691bbe55f01a3cf6c5ec869754"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:05a5636ec3eb5cc2a36c6edb534a38ef57b2ab127292a716d00eabb887835f1e"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73eeed32e724ea3568bb06161cad5fa7751e45bc2228e33dcb10c614044165c7"}, + {file = "pyarrow-18.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a1880dd6772b685e803011a6b43a230c23b566859a6e0c9a276c1e0faf4f4052"}, + {file = "pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73"}, ] +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] + [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] @@ -2762,122 +3042,131 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" -typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, -] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = 
"pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = 
"pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = 
"pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = 
"pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] @@ -2927,13 +3216,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.9.0" +version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [package.dependencies] @@ -2945,15 +3234,43 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pylance" +version = "0.18.2" +description = "python wrapper for Lance columnar format" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pylance-0.18.2-cp39-abi3-macosx_10_15_x86_64.whl", hash = 
"sha256:017422b058724dfbe8426c1ac42f0ede77324f3783e177cb4239dc034758b50b"}, + {file = "pylance-0.18.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:c4c4049eb6a6075cef721a20dd28ccba6d89b66f13e8d20ef65a284ae1c02e30"}, + {file = "pylance-0.18.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89dcf2dadee940ea86ac0b3bf7ba81c68e9774a449d8de206bc60cdc8804b853"}, + {file = "pylance-0.18.2-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:f37fb7ad0e53076c731014c210a45919f3b2620c967e2f62cf8b7c26fdc9aace"}, + {file = "pylance-0.18.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a913920f591d8404c46c74e3911fe0c29d47b923b9c3c7e521d3354c1663d812"}, + {file = "pylance-0.18.2-cp39-abi3-win_amd64.whl", hash = "sha256:72796676d7647ba9f6e86531daf67880f5e69ba8f842e237ad0c1ca419c6378c"}, +] + +[package.dependencies] +numpy = ">=1.22,<2" +pyarrow = ">=12" + +[package.extras] +benchmarks = ["pytest-benchmark"] +cuvs-cu11 = ["cuvs-cu11", "pylibraft-cu11"] +cuvs-cu12 = ["cuvs-cu12", "pylibraft-cu12"] +dev = ["ruff (==0.4.1)"] +ray = ["ray[data]"] +tests = ["boto3", "datasets", "duckdb", "ml-dtypes", "pandas", "pillow", "polars[pandas,pyarrow]", "pytest", "tensorflow", "tqdm"] +torch = ["torch"] + [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.2.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false -python-versions = ">=3.6.8" +python-versions = ">=3.9" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, + {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, ] [package.extras] @@ -2989,193 +3306,220 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pywin32" -version = "306" +version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = 
"pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] 
[[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file 
= "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "pyzmq" -version = "26.0.3" +version = "26.2.0" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = 
"pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - 
{file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, - {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, - {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, - {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = 
"pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", 
hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = 
"pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = 
"sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, + {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, + {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, ] [package.dependencies] @@ -3183,13 +3527,13 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qdrant-client" -version = "1.10.1" +version = "1.12.1" description = "Client library for the Qdrant vector search engine" optional = false python-versions = ">=3.8" 
files = [ - {file = "qdrant_client-1.10.1-py3-none-any.whl", hash = "sha256:b9fb8fe50dd168d92b2998be7c6135d5a229b3a3258ad158cc69c8adf9ff1810"}, - {file = "qdrant_client-1.10.1.tar.gz", hash = "sha256:2284c8c5bb1defb0d9dbacb07d16f344972f395f4f2ed062318476a7951fd84c"}, + {file = "qdrant_client-1.12.1-py3-none-any.whl", hash = "sha256:b2d17ce18e9e767471368380dd3bbc4a0e3a0e2061fedc9af3542084b48451e0"}, + {file = "qdrant_client-1.12.1.tar.gz", hash = "sha256:35e8e646f75b7b883b3d2d0ee4c69c5301000bba41c82aa546e985db0f1aeb72"}, ] [package.dependencies] @@ -3205,8 +3549,8 @@ pydantic = ">=1.10.8" urllib3 = ">=1.26.14,<3" [package.extras] -fastembed = ["fastembed (==0.2.7)"] -fastembed-gpu = ["fastembed-gpu (==0.2.7)"] +fastembed = ["fastembed (==0.3.6)"] +fastembed-gpu = ["fastembed-gpu (==0.3.6)"] [[package]] name = "readthedocs-sphinx-search" @@ -3236,90 +3580,105 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2024.5.15" +version = "2024.11.6" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = 
"regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - 
{file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = 
"regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = 
"regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", 
hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, ] [[package]] @@ -3343,112 +3702,131 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." +optional = false +python-versions = "*" +files = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "rpds-py" -version = "0.19.0" +version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"}, - {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"}, - {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"}, - {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = "sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"}, - {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"}, - {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"}, - {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"}, - {file = "rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"}, - {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"}, - {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"}, - {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"}, - {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"}, - {file = "rpds_py-0.19.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:52e466bea6f8f3a44b1234570244b1cff45150f59a4acae3fcc5fd700c2993ca"}, - {file = "rpds_py-0.19.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e21cc693045fda7f745c790cb687958161ce172ffe3c5719ca1764e752237d16"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b31f059878eb1f5da8b2fd82480cc18bed8dcd7fb8fe68370e2e6285fa86da6"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dd46f309e953927dd018567d6a9e2fb84783963650171f6c5fe7e5c41fd5666"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a01a4490e170376cd79258b7f755fa13b1a6c3667e872c8e35051ae857a92b"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcf426a8c38eb57f7bf28932e68425ba86def6e756a5b8cb4731d8e62e4e0223"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68eea5df6347d3f1378ce992d86b2af16ad7ff4dcb4a19ccdc23dea901b87fb"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dab8d921b55a28287733263c0e4c7db11b3ee22aee158a4de09f13c93283c62d"}, - {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6fe87efd7f47266dfc42fe76dae89060038f1d9cb911f89ae7e5084148d1cc08"}, - {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:535d4b52524a961d220875688159277f0e9eeeda0ac45e766092bfb54437543f"}, - {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8b1a94b8afc154fbe36978a511a1f155f9bd97664e4f1f7a374d72e180ceb0ae"}, - {file = "rpds_py-0.19.0-cp38-none-win32.whl", hash = "sha256:7c98298a15d6b90c8f6e3caa6457f4f022423caa5fa1a1ca7a5e9e512bdb77a4"}, - {file = "rpds_py-0.19.0-cp38-none-win_amd64.whl", hash = "sha256:b0da31853ab6e58a11db3205729133ce0df26e6804e93079dee095be3d681dc1"}, - {file = "rpds_py-0.19.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5039e3cef7b3e7a060de468a4a60a60a1f31786da94c6cb054e7a3c75906111c"}, - {file = "rpds_py-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab1932ca6cb8c7499a4d87cb21ccc0d3326f172cfb6a64021a889b591bb3045c"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2afd2164a1e85226fcb6a1da77a5c8896c18bfe08e82e8ceced5181c42d2179"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1c30841f5040de47a0046c243fc1b44ddc87d1b12435a43b8edff7e7cb1e0d0"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f757f359f30ec7dcebca662a6bd46d1098f8b9fb1fcd661a9e13f2e8ce343ba1"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15e65395a59d2e0e96caf8ee5389ffb4604e980479c32742936ddd7ade914b22"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb0f6eb3a320f24b94d177e62f4074ff438f2ad9d27e75a46221904ef21a7b05"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b228e693a2559888790936e20f5f88b6e9f8162c681830eda303bad7517b4d5a"}, - {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2575efaa5d949c9f4e2cdbe7d805d02122c16065bfb8d95c129372d65a291a0b"}, - {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c872814b77a4e84afa293a1bee08c14daed1068b2bb1cc312edbf020bbbca2b"}, - {file = 
"rpds_py-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850720e1b383df199b8433a20e02b25b72f0fded28bc03c5bd79e2ce7ef050be"}, - {file = "rpds_py-0.19.0-cp39-none-win32.whl", hash = "sha256:ce84a7efa5af9f54c0aa7692c45861c1667080814286cacb9958c07fc50294fb"}, - {file = "rpds_py-0.19.0-cp39-none-win_amd64.whl", hash = "sha256:1c26da90b8d06227d7769f34915913911222d24ce08c0ab2d60b354e2d9c7aff"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"}, - {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = 
"sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = 
"rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + 
{file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, ] [[package]] @@ -3465,130 +3843,123 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "s3transfer" -version = "0.10.2" -description = "An Amazon S3 Transfer Manager" -optional = false -python-versions = ">=3.8" -files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, -] - -[package.dependencies] -botocore = ">=1.33.2,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] - [[package]] name = "safetensors" -version = "0.4.3" +version = "0.4.5" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "safetensors-0.4.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dcf5705cab159ce0130cd56057f5f3425023c407e170bca60b4868048bae64fd"}, - {file = "safetensors-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb4f8c5d0358a31e9a08daeebb68f5e161cdd4018855426d3f0c23bb51087055"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a5319ef409e7f88686a46607cbc3c428271069d8b770076feaf913664a07ac"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb9c65bd82f9ef3ce4970dc19ee86be5f6f93d032159acf35e663c6bea02b237"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edb5698a7bc282089f64c96c477846950358a46ede85a1c040e0230344fdde10"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efcc860be094b8d19ac61b452ec635c7acb9afa77beb218b1d7784c6d41fe8ad"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d88b33980222085dd6001ae2cad87c6068e0991d4f5ccf44975d216db3b57376"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5fc6775529fb9f0ce2266edd3e5d3f10aab068e49f765e11f6f2a63b5367021d"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9c6ad011c1b4e3acff058d6b090f1da8e55a332fbf84695cf3100c649cc452d1"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c496c5401c1b9c46d41a7688e8ff5b0310a3b9bae31ce0f0ae870e1ea2b8caf"}, - {file = "safetensors-0.4.3-cp310-none-win32.whl", hash = "sha256:38e2a8666178224a51cca61d3cb4c88704f696eac8f72a49a598a93bbd8a4af9"}, - {file = "safetensors-0.4.3-cp310-none-win_amd64.whl", hash = 
"sha256:393e6e391467d1b2b829c77e47d726f3b9b93630e6a045b1d1fca67dc78bf632"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:22f3b5d65e440cec0de8edaa672efa888030802e11c09b3d6203bff60ebff05a"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c4fa560ebd4522adddb71dcd25d09bf211b5634003f015a4b815b7647d62ebe"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9afd5358719f1b2cf425fad638fc3c887997d6782da317096877e5b15b2ce93"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8c5093206ef4b198600ae484230402af6713dab1bd5b8e231905d754022bec7"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0b2104df1579d6ba9052c0ae0e3137c9698b2d85b0645507e6fd1813b70931a"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cf18888606dad030455d18f6c381720e57fc6a4170ee1966adb7ebc98d4d6a3"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bf4f9d6323d9f86eef5567eabd88f070691cf031d4c0df27a40d3b4aaee755b"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:585c9ae13a205807b63bef8a37994f30c917ff800ab8a1ca9c9b5d73024f97ee"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faefeb3b81bdfb4e5a55b9bbdf3d8d8753f65506e1d67d03f5c851a6c87150e9"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:befdf0167ad626f22f6aac6163477fcefa342224a22f11fdd05abb3995c1783c"}, - {file = "safetensors-0.4.3-cp311-none-win32.whl", hash = "sha256:a7cef55929dcbef24af3eb40bedec35d82c3c2fa46338bb13ecf3c5720af8a61"}, - {file = "safetensors-0.4.3-cp311-none-win_amd64.whl", hash = "sha256:840b7ac0eff5633e1d053cc9db12fdf56b566e9403b4950b2dc85393d9b88d67"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:22d21760dc6ebae42e9c058d75aa9907d9f35e38f896e3c69ba0e7b213033856"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d22c1a10dff3f64d0d68abb8298a3fd88ccff79f408a3e15b3e7f637ef5c980"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1648568667f820b8c48317c7006221dc40aced1869908c187f493838a1362bc"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:446e9fe52c051aeab12aac63d1017e0f68a02a92a027b901c4f8e931b24e5397"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fef5d70683643618244a4f5221053567ca3e77c2531e42ad48ae05fae909f542"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a1f4430cc0c9d6afa01214a4b3919d0a029637df8e09675ceef1ca3f0dfa0df"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d603846a8585b9432a0fd415db1d4c57c0f860eb4aea21f92559ff9902bae4d"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a844cdb5d7cbc22f5f16c7e2a0271170750763c4db08381b7f696dbd2c78a361"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:88887f69f7a00cf02b954cdc3034ffb383b2303bc0ab481d4716e2da51ddc10e"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:ee463219d9ec6c2be1d331ab13a8e0cd50d2f32240a81d498266d77d07b7e71e"}, - {file = "safetensors-0.4.3-cp312-none-win32.whl", hash = "sha256:d0dd4a1db09db2dba0f94d15addc7e7cd3a7b0d393aa4c7518c39ae7374623c3"}, - {file = "safetensors-0.4.3-cp312-none-win_amd64.whl", hash = "sha256:d14d30c25897b2bf19b6fb5ff7e26cc40006ad53fd4a88244fdf26517d852dd7"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d1456f814655b224d4bf6e7915c51ce74e389b413be791203092b7ff78c936dd"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:455d538aa1aae4a8b279344a08136d3f16334247907b18a5c3c7fa88ef0d3c46"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf476bca34e1340ee3294ef13e2c625833f83d096cfdf69a5342475602004f95"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02ef3a24face643456020536591fbd3c717c5abaa2737ec428ccbbc86dffa7a4"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7de32d0d34b6623bb56ca278f90db081f85fb9c5d327e3c18fd23ac64f465768"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a0deb16a1d3ea90c244ceb42d2c6c276059616be21a19ac7101aa97da448faf"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c59d51f182c729f47e841510b70b967b0752039f79f1de23bcdd86462a9b09ee"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f598b713cc1a4eb31d3b3203557ac308acf21c8f41104cdd74bf640c6e538e3"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5757e4688f20df083e233b47de43845d1adb7e17b6cf7da5f8444416fc53828d"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fe746d03ed8d193674a26105e4f0fe6c726f5bb602ffc695b409eaf02f04763d"}, - {file = "safetensors-0.4.3-cp37-none-win32.whl", hash = "sha256:0d5ffc6a80f715c30af253e0e288ad1cd97a3d0086c9c87995e5093ebc075e50"}, - {file = "safetensors-0.4.3-cp37-none-win_amd64.whl", hash = "sha256:a11c374eb63a9c16c5ed146457241182f310902bd2a9c18255781bb832b6748b"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1e31be7945f66be23f4ec1682bb47faa3df34cb89fc68527de6554d3c4258a4"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:03a4447c784917c9bf01d8f2ac5080bc15c41692202cd5f406afba16629e84d6"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d244bcafeb1bc06d47cfee71727e775bca88a8efda77a13e7306aae3813fa7e4"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53c4879b9c6bd7cd25d114ee0ef95420e2812e676314300624594940a8d6a91f"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74707624b81f1b7f2b93f5619d4a9f00934d5948005a03f2c1845ffbfff42212"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d52c958dc210265157573f81d34adf54e255bc2b59ded6218500c9b15a750eb"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f9568f380f513a60139971169c4a358b8731509cc19112369902eddb33faa4d"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:0d9cd8e1560dfc514b6d7859247dc6a86ad2f83151a62c577428d5102d872721"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:89f9f17b0dacb913ed87d57afbc8aad85ea42c1085bd5de2f20d83d13e9fc4b2"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1139eb436fd201c133d03c81209d39ac57e129f5e74e34bb9ab60f8d9b726270"}, - {file = "safetensors-0.4.3-cp38-none-win32.whl", hash = "sha256:d9c289f140a9ae4853fc2236a2ffc9a9f2d5eae0cb673167e0f1b8c18c0961ac"}, - {file = "safetensors-0.4.3-cp38-none-win_amd64.whl", hash = "sha256:622afd28968ef3e9786562d352659a37de4481a4070f4ebac883f98c5836563e"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8651c7299cbd8b4161a36cd6a322fa07d39cd23535b144d02f1c1972d0c62f3c"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e375d975159ac534c7161269de24ddcd490df2157b55c1a6eeace6cbb56903f0"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084fc436e317f83f7071fc6a62ca1c513b2103db325cd09952914b50f51cf78f"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41a727a7f5e6ad9f1db6951adee21bbdadc632363d79dc434876369a17de6ad6"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7dbbde64b6c534548696808a0e01276d28ea5773bc9a2dfb97a88cd3dffe3df"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbae3b4b9d997971431c346edbfe6e41e98424a097860ee872721e176040a893"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01e4b22e3284cd866edeabe4f4d896229495da457229408d2e1e4810c5187121"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dd37306546b58d3043eb044c8103a02792cc024b51d1dd16bd3dd1f334cb3ed"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8815b5e1dac85fc534a97fd339e12404db557878c090f90442247e87c8aeaea"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e011cc162503c19f4b1fd63dfcddf73739c7a243a17dac09b78e57a00983ab35"}, - {file = "safetensors-0.4.3-cp39-none-win32.whl", hash = "sha256:01feb3089e5932d7e662eda77c3ecc389f97c0883c4a12b5cfdc32b589a811c3"}, - {file = "safetensors-0.4.3-cp39-none-win_amd64.whl", hash = "sha256:3f9cdca09052f585e62328c1c2923c70f46814715c795be65f0b93f57ec98a02"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1b89381517891a7bb7d1405d828b2bf5d75528299f8231e9346b8eba092227f9"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cd6fff9e56df398abc5866b19a32124815b656613c1c5ec0f9350906fd798aac"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:840caf38d86aa7014fe37ade5d0d84e23dcfbc798b8078015831996ecbc206a3"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9650713b2cfa9537a2baf7dd9fee458b24a0aaaa6cafcea8bdd5fb2b8efdc34"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4119532cd10dba04b423e0f86aecb96cfa5a602238c0aa012f70c3a40c44b50"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e066e8861eef6387b7c772344d1fe1f9a72800e04ee9a54239d460c400c72aab"}, - {file = 
"safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:90964917f5b0fa0fa07e9a051fbef100250c04d150b7026ccbf87a34a54012e0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c41e1893d1206aa7054029681778d9a58b3529d4c807002c156d58426c225173"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae7613a119a71a497d012ccc83775c308b9c1dab454806291427f84397d852fd"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9bac020faba7f5dc481e881b14b6425265feabb5bfc552551d21189c0eddc3"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:420a98f593ff9930f5822560d14c395ccbc57342ddff3b463bc0b3d6b1951550"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f5e6883af9a68c0028f70a4c19d5a6ab6238a379be36ad300a22318316c00cb0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:cdd0a3b5da66e7f377474599814dbf5cbf135ff059cc73694de129b58a5e8a2c"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9bfb92f82574d9e58401d79c70c716985dc049b635fef6eecbb024c79b2c46ad"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3615a96dd2dcc30eb66d82bc76cda2565f4f7bfa89fcb0e31ba3cea8a1a9ecbb"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868ad1b6fc41209ab6bd12f63923e8baeb1a086814cb2e81a65ed3d497e0cf8f"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffba80aa49bd09195145a7fd233a7781173b422eeb995096f2b30591639517"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0acbe31340ab150423347e5b9cc595867d814244ac14218932a5cf1dd38eb39"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19bbdf95de2cf64f25cd614c5236c8b06eb2cfa47cbf64311f4b5d80224623a3"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b852e47eb08475c2c1bd8131207b405793bfc20d6f45aff893d3baaad449ed14"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d07cbca5b99babb692d76d8151bec46f461f8ad8daafbfd96b2fca40cadae65"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1ab6527a20586d94291c96e00a668fa03f86189b8a9defa2cdd34a1a01acc7d5"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02318f01e332cc23ffb4f6716e05a492c5f18b1d13e343c49265149396284a44"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec4b52ce9a396260eb9731eb6aea41a7320de22ed73a1042c2230af0212758ce"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:018b691383026a2436a22b648873ed11444a364324e7088b99cd2503dd828400"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:309b10dbcab63269ecbf0e2ca10ce59223bb756ca5d431ce9c9eeabd446569da"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b277482120df46e27a58082df06a15aebda4481e30a1c21eefd0921ae7e03f65"}, - {file = "safetensors-0.4.3.tar.gz", hash = 
"sha256:2f85fc50c4e07a21e95c24e07460fe6f7e2859d0ce88092838352b798ce711c2"}, + {file = "safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7"}, + {file = "safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6885016f34bef80ea1085b7e99b3c1f92cb1be78a49839203060f67b40aee761"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:133620f443450429322f238fda74d512c4008621227fccf2f8cf4a76206fea7c"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3e0609ec12d2a77e882f07cced530b8262027f64b75d399f1504ffec0ba56"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f1dd769f064adc33831f5e97ad07babbd728427f98e3e1db6902e369122737"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6d156bdb26732feada84f9388a9f135528c1ef5b05fae153da365ad4319c4c5"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e347d77e2c77eb7624400ccd09bed69d35c0332f417ce8c048d404a096c593b"}, + {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9f556eea3aec1d3d955403159fe2123ddd68e880f83954ee9b4a3f2e15e716b6"}, + {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9483f42be3b6bc8ff77dd67302de8ae411c4db39f7224dec66b0eb95822e4163"}, + {file = "safetensors-0.4.5-cp310-none-win32.whl", hash = "sha256:7389129c03fadd1ccc37fd1ebbc773f2b031483b04700923c3511d2a939252cc"}, + {file = "safetensors-0.4.5-cp310-none-win_amd64.whl", hash = "sha256:e98ef5524f8b6620c8cdef97220c0b6a5c1cef69852fcd2f174bb96c2bb316b1"}, + {file = "safetensors-0.4.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:21f848d7aebd5954f92538552d6d75f7c1b4500f51664078b5b49720d180e47c"}, + {file = "safetensors-0.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb07000b19d41e35eecef9a454f31a8b4718a185293f0d0b1c4b61d6e4487971"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09dedf7c2fda934ee68143202acff6e9e8eb0ddeeb4cfc24182bef999efa9f42"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59b77e4b7a708988d84f26de3ebead61ef1659c73dcbc9946c18f3b1786d2688"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d3bc83e14d67adc2e9387e511097f254bd1b43c3020440e708858c684cbac68"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39371fc551c1072976073ab258c3119395294cf49cdc1f8476794627de3130df"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c19feda32b931cae0acd42748a670bdf56bee6476a046af20181ad3fee4090"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a659467495de201e2f282063808a41170448c78bada1e62707b07a27b05e6943"}, + {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bad5e4b2476949bcd638a89f71b6916fa9a5cae5c1ae7eede337aca2100435c0"}, + {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:a3a315a6d0054bc6889a17f5668a73f94f7fe55121ff59e0a199e3519c08565f"}, + {file = "safetensors-0.4.5-cp311-none-win32.whl", hash = "sha256:a01e232e6d3d5cf8b1667bc3b657a77bdab73f0743c26c1d3c5dd7ce86bd3a92"}, + {file = "safetensors-0.4.5-cp311-none-win_amd64.whl", hash = "sha256:cbd39cae1ad3e3ef6f63a6f07296b080c951f24cec60188378e43d3713000c04"}, + {file = "safetensors-0.4.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:473300314e026bd1043cef391bb16a8689453363381561b8a3e443870937cc1e"}, + {file = "safetensors-0.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:801183a0f76dc647f51a2d9141ad341f9665602a7899a693207a82fb102cc53e"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1524b54246e422ad6fb6aea1ac71edeeb77666efa67230e1faf6999df9b2e27f"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3139098e3e8b2ad7afbca96d30ad29157b50c90861084e69fcb80dec7430461"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65573dc35be9059770808e276b017256fa30058802c29e1038eb1c00028502ea"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd33da8e9407559f8779c82a0448e2133737f922d71f884da27184549416bfed"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3685ce7ed036f916316b567152482b7e959dc754fcc4a8342333d222e05f407c"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dde2bf390d25f67908278d6f5d59e46211ef98e44108727084d4637ee70ab4f1"}, + {file = "safetensors-0.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7469d70d3de970b1698d47c11ebbf296a308702cbaae7fcb993944751cf985f4"}, + {file = "safetensors-0.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a6ba28118636a130ccbb968bc33d4684c48678695dba2590169d5ab03a45646"}, + {file = "safetensors-0.4.5-cp312-none-win32.whl", hash = "sha256:c859c7ed90b0047f58ee27751c8e56951452ed36a67afee1b0a87847d065eec6"}, + {file = "safetensors-0.4.5-cp312-none-win_amd64.whl", hash = "sha256:b5a8810ad6a6f933fff6c276eae92c1da217b39b4d8b1bc1c0b8af2d270dc532"}, + {file = "safetensors-0.4.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:25e5f8e2e92a74f05b4ca55686234c32aac19927903792b30ee6d7bd5653d54e"}, + {file = "safetensors-0.4.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81efb124b58af39fcd684254c645e35692fea81c51627259cdf6d67ff4458916"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:585f1703a518b437f5103aa9cf70e9bd437cb78eea9c51024329e4fb8a3e3679"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b99fbf72e3faf0b2f5f16e5e3458b93b7d0a83984fe8d5364c60aa169f2da89"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b17b299ca9966ca983ecda1c0791a3f07f9ca6ab5ded8ef3d283fff45f6bcd5f"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76ded72f69209c9780fdb23ea89e56d35c54ae6abcdec67ccb22af8e696e449a"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2783956926303dcfeb1de91a4d1204cd4089ab441e622e7caee0642281109db3"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d94581aab8c6b204def4d7320f07534d6ee34cd4855688004a4354e63b639a35"}, + {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:67e1e7cb8678bb1b37ac48ec0df04faf689e2f4e9e81e566b5c63d9f23748523"}, + {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbd280b07e6054ea68b0cb4b16ad9703e7d63cd6890f577cb98acc5354780142"}, + {file = "safetensors-0.4.5-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:77d9b228da8374c7262046a36c1f656ba32a93df6cc51cd4453af932011e77f1"}, + {file = "safetensors-0.4.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:500cac01d50b301ab7bb192353317035011c5ceeef0fca652f9f43c000bb7f8d"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75331c0c746f03158ded32465b7d0b0e24c5a22121743662a2393439c43a45cf"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670e95fe34e0d591d0529e5e59fd9d3d72bc77b1444fcaa14dccda4f36b5a38b"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:098923e2574ff237c517d6e840acada8e5b311cb1fa226019105ed82e9c3b62f"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ca0902d2648775089fa6a0c8fc9e6390c5f8ee576517d33f9261656f851e3f"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f0032bedc869c56f8d26259fe39cd21c5199cd57f2228d817a0e23e8370af25"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4b15f51b4f8f2a512341d9ce3475cacc19c5fdfc5db1f0e19449e75f95c7dc8"}, + {file = "safetensors-0.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f6594d130d0ad933d885c6a7b75c5183cb0e8450f799b80a39eae2b8508955eb"}, + {file = "safetensors-0.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:60c828a27e852ded2c85fc0f87bf1ec20e464c5cd4d56ff0e0711855cc2e17f8"}, + {file = "safetensors-0.4.5-cp37-none-win32.whl", hash = "sha256:6d3de65718b86c3eeaa8b73a9c3d123f9307a96bbd7be9698e21e76a56443af5"}, + {file = "safetensors-0.4.5-cp37-none-win_amd64.whl", hash = "sha256:5a2d68a523a4cefd791156a4174189a4114cf0bf9c50ceb89f261600f3b2b81a"}, + {file = "safetensors-0.4.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:e7a97058f96340850da0601a3309f3d29d6191b0702b2da201e54c6e3e44ccf0"}, + {file = "safetensors-0.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:63bfd425e25f5c733f572e2246e08a1c38bd6f2e027d3f7c87e2e43f228d1345"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3664ac565d0e809b0b929dae7ccd74e4d3273cd0c6d1220c6430035befb678e"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:313514b0b9b73ff4ddfb4edd71860696dbe3c1c9dc4d5cc13dbd74da283d2cbf"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31fa33ee326f750a2f2134a6174773c281d9a266ccd000bd4686d8021f1f3dac"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09566792588d77b68abe53754c9f1308fadd35c9f87be939e22c623eaacbed6b"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309aaec9b66cbf07ad3a2e5cb8a03205663324fea024ba391594423d0f00d9fe"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:53946c5813b8f9e26103c5efff4a931cc45d874f45229edd68557ffb35ffb9f8"}, + {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:868f9df9e99ad1e7f38c52194063a982bc88fedc7d05096f4f8160403aaf4bd6"}, + {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9cc9449bd0b0bc538bd5e268221f0c5590bc5c14c1934a6ae359d44410dc68c4"}, + {file = "safetensors-0.4.5-cp38-none-win32.whl", hash = "sha256:83c4f13a9e687335c3928f615cd63a37e3f8ef072a3f2a0599fa09f863fb06a2"}, + {file = "safetensors-0.4.5-cp38-none-win_amd64.whl", hash = "sha256:b98d40a2ffa560653f6274e15b27b3544e8e3713a44627ce268f419f35c49478"}, + {file = "safetensors-0.4.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cf727bb1281d66699bef5683b04d98c894a2803442c490a8d45cd365abfbdeb2"}, + {file = "safetensors-0.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96f1d038c827cdc552d97e71f522e1049fef0542be575421f7684756a748e457"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:139fbee92570ecea774e6344fee908907db79646d00b12c535f66bc78bd5ea2c"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c36302c1c69eebb383775a89645a32b9d266878fab619819ce660309d6176c9b"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d641f5b8149ea98deb5ffcf604d764aad1de38a8285f86771ce1abf8e74c4891"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b4db6a61d968de73722b858038c616a1bebd4a86abe2688e46ca0cc2d17558f2"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b75a616e02f21b6f1d5785b20cecbab5e2bd3f6358a90e8925b813d557666ec1"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:788ee7d04cc0e0e7f944c52ff05f52a4415b312f5efd2ee66389fb7685ee030c"}, + {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87bc42bd04fd9ca31396d3ca0433db0be1411b6b53ac5a32b7845a85d01ffc2e"}, + {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4037676c86365a721a8c9510323a51861d703b399b78a6b4486a54a65a975fca"}, + {file = "safetensors-0.4.5-cp39-none-win32.whl", hash = "sha256:1500418454529d0ed5c1564bda376c4ddff43f30fce9517d9bee7bcce5a8ef50"}, + {file = "safetensors-0.4.5-cp39-none-win_amd64.whl", hash = "sha256:9d1a94b9d793ed8fe35ab6d5cea28d540a46559bafc6aae98f30ee0867000cab"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdadf66b5a22ceb645d5435a0be7a0292ce59648ca1d46b352f13cff3ea80410"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d42ffd4c2259f31832cb17ff866c111684c87bd930892a1ba53fed28370c918c"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd8a1f6d2063a92cd04145c7fd9e31a1c7d85fbec20113a14b487563fdbc0597"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:951d2fcf1817f4fb0ef0b48f6696688a4e852a95922a042b3f96aaa67eedc920"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ac85d9a8c1af0e3132371d9f2d134695a06a96993c2e2f0bbe25debb9e3f67a"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e3cec4a29eb7fe8da0b1c7988bc3828183080439dd559f720414450de076fcab"}, + {file = 
"safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:21742b391b859e67b26c0b2ac37f52c9c0944a879a25ad2f9f9f3cd61e7fda8f"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7db3006a4915151ce1913652e907cdede299b974641a83fbc092102ac41b644"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f68bf99ea970960a237f416ea394e266e0361895753df06e3e06e6ea7907d98b"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8158938cf3324172df024da511839d373c40fbfaa83e9abf467174b2910d7b4c"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:540ce6c4bf6b58cb0fd93fa5f143bc0ee341c93bb4f9287ccd92cf898cc1b0dd"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bfeaa1a699c6b9ed514bd15e6a91e74738b71125a9292159e3d6b7f0a53d2cde"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:01c8f00da537af711979e1b42a69a8ec9e1d7112f208e0e9b8a35d2c381085ef"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a0dd565f83b30f2ca79b5d35748d0d99dd4b3454f80e03dfb41f0038e3bdf180"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:023b6e5facda76989f4cba95a861b7e656b87e225f61811065d5c501f78cdb3f"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9633b663393d5796f0b60249549371e392b75a0b955c07e9c6f8708a87fc841f"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78dd8adfb48716233c45f676d6e48534d34b4bceb50162c13d1f0bdf6f78590a"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e8deb16c4321d61ae72533b8451ec4a9af8656d1c61ff81aa49f966406e4b68"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:52452fa5999dc50c4decaf0c53aa28371f7f1e0fe5c2dd9129059fbe1e1599c7"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d5f23198821e227cfc52d50fa989813513db381255c6d100927b012f0cfec63d"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f4beb84b6073b1247a773141a6331117e35d07134b3bb0383003f39971d414bb"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:68814d599d25ed2fdd045ed54d370d1d03cf35e02dce56de44c651f828fb9b7b"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b6453c54c57c1781292c46593f8a37254b8b99004c68d6c3ce229688931a22"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adaa9c6dead67e2dd90d634f89131e43162012479d86e25618e821a03d1eb1dc"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73e7d408e9012cd17511b382b43547850969c7979efc2bc353f317abaf23c84c"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:775409ce0fcc58b10773fdb4221ed1eb007de10fe7adbdf8f5e8a56096b6f0bc"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:834001bed193e4440c4a3950a31059523ee5090605c907c66808664c932b549c"}, + {file = "safetensors-0.4.5.tar.gz", hash = 
"sha256:d73de19682deabb02524b3d5d1f8b3aaba94c72f1bbfc7911b9b9d5d391c0310"}, ] [package.extras] @@ -3606,29 +3977,33 @@ torch = ["safetensors[numpy]", "torch (>=1.10)"] [[package]] name = "setuptools" -version = "72.1.0" +version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, 
!=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -3655,24 +4030,24 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sphinx" -version = "7.4.5" +version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" files = [ - {file = "sphinx-7.4.5-py3-none-any.whl", hash = "sha256:9f135d8c1d277db67be514be579c4c4a26c8c0e962219aaca5a721b04bd6d0d8"}, - {file = "sphinx-7.4.5.tar.gz", hash = "sha256:a4abe5385bf856df094c1e6cadf24a2351b12057be3670b99a12c05a01d209f5"}, + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] @@ -3720,73 +4095,74 @@ rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] [[package]] name = "sphinx-design" -version = "0.6.0" +version = "0.6.1" description = "A sphinx extension for designing beautiful, view size responsive web components." 
optional = false python-versions = ">=3.9" files = [ - {file = "sphinx_design-0.6.0-py3-none-any.whl", hash = "sha256:e9bd07eecec82eb07ff72cb50fc3624e186b04f5661270bc7b62db86c7546e95"}, - {file = "sphinx_design-0.6.0.tar.gz", hash = "sha256:ec8e3c5c59fed4049b3a5a2e209360feab31829346b5f6a0c7c342b894082192"}, + {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"}, + {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"}, ] [package.dependencies] -sphinx = ">=5,<8" +sphinx = ">=6,<9" [package.extras] code-style = ["pre-commit (>=3,<4)"] -rtd = ["myst-parser (>=1,<3)"] -testing = ["defusedxml", "myst-parser (>=1,<3)", "pytest (>=7.1,<8.0)", "pytest-cov", "pytest-regressions"] -theme-furo = ["furo (>=2024.5.4,<2024.6.0)"] -theme-im = ["sphinx-immaterial (>=0.11.11,<0.12.0)"] +rtd = ["myst-parser (>=2,<4)"] +testing = ["defusedxml", "myst-parser (>=2,<4)", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +testing-no-myst = ["defusedxml", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +theme-furo = ["furo (>=2024.7.18,<2024.8.0)"] +theme-im = ["sphinx-immaterial (>=0.12.2,<0.13.0)"] theme-pydata = ["pydata-sphinx-theme (>=0.15.2,<0.16.0)"] theme-rtd = ["sphinx-rtd-theme (>=2.0,<3.0)"] theme-sbt = ["sphinx-book-theme (>=1.1,<2.0)"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.8" +version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.6" +version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.5" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = 
"sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] @@ -3806,92 +4182,100 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.7" +version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] -test = ["pytest"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.10" +version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sqlalchemy" -version = "2.0.31" +version = "2.0.36" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, - {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, - {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [package.dependencies] @@ -3904,7 +4288,7 @@ aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -3994,13 +4378,13 @@ blobfile = ["blobfile (>=2)"] [[package]] name = "tinycss2" -version = "1.3.0" +version = "1.4.0" description = "A tiny CSS parser" optional = false python-versions = ">=3.8" files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, + {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, + {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, ] [package.dependencies] @@ -4012,111 +4396,26 @@ test = ["pytest", "ruff"] [[package]] name = "tokenizers" -version = "0.19.1" +version = "0.21.0" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"}, - {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"}, - {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"}, - {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"}, - {file = "tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"}, - {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"}, - 
{file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"}, - {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"}, - {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"}, - {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"}, - {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"}, - {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"}, - {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"}, - {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = "sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"}, - {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = 
"sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"}, - 
{file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"}, - {file = "tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"}, + {file = "tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2"}, + {file = "tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b177fb54c4702ef611de0c069d9169f0004233890e0c4c5bd5508ae05abf193"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b43779a269f4629bebb114e19c3fca0223296ae9fea8bb9a7a6c6fb0657ff8e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aeb255802be90acfd363626753fda0064a8df06031012fe7d52fd9a905eb00e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8b09dbeb7a8d73ee204a70f94fc06ea0f17dcf0844f16102b9f414f0b7463ba"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:400832c0904f77ce87c40f1a8a27493071282f785724ae62144324f171377273"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84ca973b3a96894d1707e189c14a774b701596d579ffc7e69debfc036a61a04"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:eb7202d231b273c34ec67767378cd04c767e967fda12d4a9e36208a34e2f137e"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:089d56db6782a73a27fd8abf3ba21779f5b85d4a9f35e3b493c7bbcbbf0d539b"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:c87ca3dc48b9b1222d984b6b7490355a6fdb411a2d810f6f05977258400ddb74"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4145505a973116f91bc3ac45988a92e618a6f83eb458f49ea0790df94ee243ff"}, + {file = "tokenizers-0.21.0-cp39-abi3-win32.whl", hash = "sha256:eb1702c2f27d25d9dd5b389cc1f2f51813e99f8ca30d9e25348db6585a97e24a"}, + {file = "tokenizers-0.21.0-cp39-abi3-win_amd64.whl", hash = "sha256:87841da5a25a3a5f70c102de371db120f41873b854ba65e52bccd57df5a3780c"}, + {file = "tokenizers-0.21.0.tar.gz", hash = "sha256:ee0894bf311b75b0c03079f33859ae4b2334d675d4e93f5a4132e1eae2834fe4"}, ] [package.dependencies] @@ -4129,42 +4428,69 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] 
[[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "torch" -version = "2.4.0" +version = "2.5.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = false python-versions = ">=3.8.0" files = [ - {file = "torch-2.4.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:4ed94583e244af51d6a8d28701ca5a9e02d1219e782f5a01dd401f90af17d8ac"}, - {file = "torch-2.4.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c4ca297b7bd58b506bfd6e78ffd14eb97c0e7797dcd7965df62f50bb575d8954"}, - {file = "torch-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:2497cbc7b3c951d69b276ca51fe01c2865db67040ac67f5fc20b03e41d16ea4a"}, - {file = "torch-2.4.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:685418ab93730efbee71528821ff54005596970dd497bf03c89204fb7e3f71de"}, - {file = "torch-2.4.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e743adadd8c8152bb8373543964551a7cb7cc20ba898dc8f9c0cdbe47c283de0"}, - {file = "torch-2.4.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:7334325c0292cbd5c2eac085f449bf57d3690932eac37027e193ba775703c9e6"}, - {file = "torch-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:97730014da4c57ffacb3c09298c6ce05400606e890bd7a05008d13dd086e46b1"}, - {file = "torch-2.4.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:f169b4ea6dc93b3a33319611fcc47dc1406e4dd539844dcbd2dec4c1b96e166d"}, - {file = "torch-2.4.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:997084a0f9784d2a89095a6dc67c7925e21bf25dea0b3d069b41195016ccfcbb"}, - {file = "torch-2.4.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:bc3988e8b36d1e8b998d143255d9408d8c75da4ab6dd0dcfd23b623dfb0f0f57"}, - {file = "torch-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:3374128bbf7e62cdaed6c237bfd39809fbcfaa576bee91e904706840c3f2195c"}, - {file = "torch-2.4.0-cp312-none-macosx_11_0_arm64.whl", hash = 
"sha256:91aaf00bfe1ffa44dc5b52809d9a95129fca10212eca3ac26420eb11727c6288"}, - {file = "torch-2.4.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cc30457ea5489c62747d3306438af00c606b509d78822a88f804202ba63111ed"}, - {file = "torch-2.4.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a046491aaf96d1215e65e1fa85911ef2ded6d49ea34c8df4d0638879f2402eef"}, - {file = "torch-2.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:688eec9240f3ce775f22e1e1a5ab9894f3d5fe60f3f586deb7dbd23a46a83916"}, - {file = "torch-2.4.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:3af4de2a618fb065e78404c4ba27a818a7b7957eaeff28c6c66ce7fb504b68b8"}, - {file = "torch-2.4.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:618808d3f610d5f180e47a697d4ec90b810953bb1e020f424b2ac7fb0884b545"}, - {file = "torch-2.4.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ed765d232d23566052ba83632ec73a4fccde00b4c94ad45d63b471b09d63b7a7"}, - {file = "torch-2.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2feb98ac470109472fb10dfef38622a7ee08482a16c357863ebc7bc7db7c8f7"}, - {file = "torch-2.4.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:8940fc8b97a4c61fdb5d46a368f21f4a3a562a17879e932eb51a5ec62310cb31"}, + {file = "torch-2.5.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:71328e1bbe39d213b8721678f9dcac30dfc452a46d586f1d514a6aa0a99d4744"}, + {file = "torch-2.5.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:34bfa1a852e5714cbfa17f27c49d8ce35e1b7af5608c4bc6e81392c352dbc601"}, + {file = "torch-2.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:32a037bd98a241df6c93e4c789b683335da76a2ac142c0973675b715102dc5fa"}, + {file = "torch-2.5.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:23d062bf70776a3d04dbe74db950db2a5245e1ba4f27208a87f0d743b0d06e86"}, + {file = "torch-2.5.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:de5b7d6740c4b636ef4db92be922f0edc425b65ed78c5076c43c42d362a45457"}, + {file = "torch-2.5.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:340ce0432cad0d37f5a31be666896e16788f1adf8ad7be481196b503dad675b9"}, + {file = "torch-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:603c52d2fe06433c18b747d25f5c333f9c1d58615620578c326d66f258686f9a"}, + {file = "torch-2.5.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:31f8c39660962f9ae4eeec995e3049b5492eb7360dd4f07377658ef4d728fa4c"}, + {file = "torch-2.5.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:ed231a4b3a5952177fafb661213d690a72caaad97d5824dd4fc17ab9e15cec03"}, + {file = "torch-2.5.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:3f4b7f10a247e0dcd7ea97dc2d3bfbfc90302ed36d7f3952b0008d0df264e697"}, + {file = "torch-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:73e58e78f7d220917c5dbfad1a40e09df9929d3b95d25e57d9f8558f84c9a11c"}, + {file = "torch-2.5.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:8c712df61101964eb11910a846514011f0b6f5920c55dbf567bff8a34163d5b1"}, + {file = "torch-2.5.1-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:9b61edf3b4f6e3b0e0adda8b3960266b9009d02b37555971f4d1c8f7a05afed7"}, + {file = "torch-2.5.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1f3b7fb3cf7ab97fae52161423f81be8c6b8afac8d9760823fd623994581e1a3"}, + {file = "torch-2.5.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7974e3dce28b5a21fb554b73e1bc9072c25dde873fa00d54280861e7a009d7dc"}, + {file = "torch-2.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:46c817d3ea33696ad3b9df5e774dba2257e9a4cd3c4a3afbf92f6bb13ac5ce2d"}, + {file = "torch-2.5.1-cp39-none-macosx_11_0_arm64.whl", hash = 
"sha256:8046768b7f6d35b85d101b4b38cba8aa2f3cd51952bc4c06a49580f2ce682291"}, ] [package.dependencies] @@ -4172,61 +4498,64 @@ filelock = "*" fsspec = "*" jinja2 = "*" networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cublas-cu12 = {version = "12.4.5.8", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-cupti-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-nvrtc-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-runtime-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -sympy = "*" -triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} +nvidia-cufft-cu12 = {version = "11.2.1.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-curand-cu12 = {version = "10.3.5.147", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusolver-cu12 = {version = "11.6.1.9", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusparse-cu12 = {version = "12.3.1.170", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu12 = {version = "2.21.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nvjitlink-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nvtx-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +setuptools = {version = "*", markers = "python_version >= \"3.12\""} +sympy = {version = "1.13.1", markers = "python_version >= \"3.9\""} +triton = {version = "3.1.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} typing-extensions = ">=4.8.0" [package.extras] opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.11.0)"] +optree = ["optree (>=0.12.0)"] [[package]] name = "tornado" -version = "6.4.1" +version = "6.4.2" description = 
"Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">=3.8" files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, + {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, + {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, + {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, ] [[package]] name = "tqdm" -version 
= "4.66.4" +version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] @@ -4248,39 +4577,39 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "transformers" -version = "4.42.4" +version = "4.47.0" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "transformers-4.42.4-py3-none-any.whl", hash = "sha256:6d59061392d0f1da312af29c962df9017ff3c0108c681a56d1bc981004d16d24"}, - {file = "transformers-4.42.4.tar.gz", hash = "sha256:f956e25e24df851f650cb2c158b6f4352dfae9d702f04c113ed24fc36ce7ae2d"}, + {file = "transformers-4.47.0-py3-none-any.whl", hash = "sha256:a8e1bafdaae69abdda3cad638fe392e37c86d2ce0ecfcae11d60abb8f949ff4d"}, + {file = "transformers-4.47.0.tar.gz", hash = "sha256:f8ead7a5a4f6937bb507e66508e5e002dc5930f7b6122a9259c37b099d0f3b19"}, ] [package.dependencies] filelock = "*" -huggingface-hub = ">=0.23.2,<1.0" -numpy = ">=1.17,<2.0" +huggingface-hub = ">=0.24.0,<1.0" +numpy = ">=1.17" packaging = ">=20.0" pyyaml = ">=5.1" regex = "!=2019.12.17" requests = "*" safetensors = ">=0.4.1" -tokenizers = ">=0.19,<0.20" +tokenizers = ">=0.21,<0.22" tqdm = ">=4.27" [package.extras] -accelerate = ["accelerate (>=0.21.0)"] -agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision"] +accelerate = ["accelerate (>=0.26.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] 
(>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=1.0.11)", "tokenizers (>=0.21,<0.22)", "torch", "torchaudio", "torchvision"] audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -benchmark = ["optimum-benchmark (>=0.2.0)"] +benchmark = ["optimum-benchmark (>=0.3.0)"] codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", 
"pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +deepspeed = ["accelerate (>=0.26.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk (<=3.8.1)", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "nltk (<=3.8.1)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=1.0.11)", "tokenizers (>=0.21,<0.22)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "nltk (<=3.8.1)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.21,<0.22)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", 
"faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "libcst", "librosa", "nltk (<=3.8.1)", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=1.0.11)", "tokenizers (>=0.21,<0.22)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] @@ -4291,46 +4620,42 @@ natten = ["natten (>=0.14.6,<0.15.0)"] onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "ruff (==0.4.4)", "urllib3 (<2.0.0)"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "libcst", "rich", "ruff (==0.5.1)", "urllib3 (<2.0.0)"] ray = ["ray[tune] (>=2.7.0)"] retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -ruff = ["ruff (==0.4.4)"] +ruff = ["ruff (==0.5.1)"] sagemaker = ["sagemaker (>=2.31.0)"] sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] serving = ["fastapi", "pydantic", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk (<=3.8.1)", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] 
tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm (<=0.9.16)"] -tokenizers = ["tokenizers (>=0.19,<0.20)"] -torch = ["accelerate (>=0.21.0)", "torch"] +tiktoken = ["blobfile", "tiktoken"] +timm = ["timm (<=1.0.11)"] +tokenizers = ["tokenizers (>=0.21,<0.22)"] +torch = ["accelerate (>=0.26.0)", "torch"] torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.23.2,<1.0)", "importlib-metadata", "numpy (>=1.17,<2.0)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.19,<0.20)", "torch", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] +torchhub = ["filelock", "huggingface-hub (>=0.24.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.21,<0.22)", "torch", "tqdm (>=4.27)"] +video = ["av (==9.2.0)"] vision = ["Pillow (>=10.0.1,<=15.0)"] [[package]] name = "triton" -version = "3.0.0" +version = "3.1.0" description = "A language and compiler for custom Deep Learning operations" optional = false python-versions = "*" files = [ - {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"}, - {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"}, - {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"}, - {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"}, - {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"}, - {file = "triton-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b052da883351fdf6be3d93cedae6db3b8e3988d3b09ed221bccecfa9612230"}, - {file = "triton-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd34f19a8582af96e6291d4afce25dac08cb2a5d218c599163761e8e0827208e"}, - {file = "triton-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d5e10de8c011adeb7c878c6ce0dd6073b14367749e34467f1cff2bde1b78253"}, - {file = "triton-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8903767951bf86ec960b4fe4e21bc970055afc65e9d57e916d79ae3c93665e3"}, - {file = "triton-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41004fb1ae9a53fcb3e970745feb87f0e3c94c6ce1ba86e95fa3b8537894bef7"}, + {file = "triton-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b0dd10a925263abbe9fa37dcde67a5e9b2383fc269fdf59f5657cac38c5d1d8"}, + {file = "triton-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f34f6e7885d1bf0eaaf7ba875a5f0ce6f3c13ba98f9503651c1e6dc6757ed5c"}, + {file = "triton-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8182f42fd8080a7d39d666814fa36c5e30cc00ea7eeeb1a2983dbb4c99a0fdc"}, + {file = 
"triton-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dadaca7fc24de34e180271b5cf864c16755702e9f63a16f62df714a8099126a"}, + {file = "triton-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aafa9a20cd0d9fee523cd4504aa7131807a864cd77dcf6efe7e981f18b8c6c11"}, ] [package.dependencies] @@ -4357,13 +4682,13 @@ types-urllib3 = "*" [[package]] name = "types-requests" -version = "2.32.0.20240712" +version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, - {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] @@ -4393,13 +4718,13 @@ files = [ [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] @@ -4415,13 +4740,13 @@ files = [ [[package]] name = "urllib3" -version = "1.26.19" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, - {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] @@ -4431,13 +4756,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -4459,240 +4784,252 @@ files = [ [[package]] name = "xxhash" -version = "3.4.1" +version = "3.5.0" description = "Python binding for xxHash" optional = false python-versions = ">=3.7" files = [ - {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, - {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, - {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, - {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, - {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, - {file = 
"xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, - {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, - {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, - {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, - {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, - {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, - {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, - {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, - {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, - {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, - {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, - {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, - {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, - {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, - {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, - {file = 
"xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, - {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, + {file = "xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212"}, + {file = "xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442"}, + {file = "xxhash-3.5.0-cp310-cp310-win32.whl", hash = "sha256:61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da"}, + {file = "xxhash-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9"}, + {file = "xxhash-3.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6"}, + {file = "xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1"}, + {file = "xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c"}, + {file = 
"xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839"}, + {file = "xxhash-3.5.0-cp311-cp311-win32.whl", hash = "sha256:109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da"}, + {file = "xxhash-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58"}, + {file = "xxhash-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3"}, + {file = "xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00"}, + {file = "xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e"}, + {file = "xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"}, + {file = "xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"}, + {file = "xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"}, + {file = "xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6"}, + {file = "xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5"}, + 
{file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c"}, + {file = "xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637"}, + {file = "xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43"}, + {file = "xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b"}, + {file = "xxhash-3.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6e5f70f6dca1d3b09bccb7daf4e087075ff776e3da9ac870f86ca316736bb4aa"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e76e83efc7b443052dd1e585a76201e40b3411fe3da7af4fe434ec51b2f163b"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33eac61d0796ca0591f94548dcfe37bb193671e0c9bcf065789b5792f2eda644"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ec70a89be933ea49222fafc3999987d7899fc676f688dd12252509434636622"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86b8e7f703ec6ff4f351cfdb9f428955859537125904aa8c963604f2e9d3e7"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0adfbd36003d9f86c8c97110039f7539b379f28656a04097e7434d3eaf9aa131"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:63107013578c8a730419adc05608756c3fa640bdc6abe806c3123a49fb829f43"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:683b94dbd1ca67557850b86423318a2e323511648f9f3f7b1840408a02b9a48c"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5d2a01dcce81789cf4b12d478b5464632204f4c834dc2d064902ee27d2d1f0ee"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = 
"sha256:a9d360a792cbcce2fe7b66b8d51274ec297c53cbc423401480e53b26161a290d"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f0b48edbebea1b7421a9c687c304f7b44d0677c46498a046079d445454504737"}, + {file = "xxhash-3.5.0-cp37-cp37m-win32.whl", hash = "sha256:7ccb800c9418e438b44b060a32adeb8393764da7441eb52aa2aa195448935306"}, + {file = "xxhash-3.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c3bc7bf8cb8806f8d1c9bf149c18708cb1c406520097d6b0a73977460ea03602"}, + {file = "xxhash-3.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74752ecaa544657d88b1d1c94ae68031e364a4d47005a90288f3bab3da3c970f"}, + {file = "xxhash-3.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dee1316133c9b463aa81aca676bc506d3f80d8f65aeb0bba2b78d0b30c51d7bd"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:602d339548d35a8579c6b013339fb34aee2df9b4e105f985443d2860e4d7ffaa"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:695735deeddfb35da1677dbc16a083445360e37ff46d8ac5c6fcd64917ff9ade"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1030a39ba01b0c519b1a82f80e8802630d16ab95dc3f2b2386a0b5c8ed5cbb10"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5bc08f33c4966f4eb6590d6ff3ceae76151ad744576b5fc6c4ba8edd459fdec"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160e0c19ee500482ddfb5d5570a0415f565d8ae2b3fd69c5dcfce8a58107b1c3"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f1abffa122452481a61c3551ab3c89d72238e279e517705b8b03847b1d93d738"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d5e9db7ef3ecbfc0b4733579cea45713a76852b002cf605420b12ef3ef1ec148"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:23241ff6423378a731d84864bf923a41649dc67b144debd1077f02e6249a0d54"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:82b833d5563fefd6fceafb1aed2f3f3ebe19f84760fdd289f8b926731c2e6e91"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a80ad0ffd78bef9509eee27b4a29e56f5414b87fb01a888353e3d5bda7038bd"}, + {file = "xxhash-3.5.0-cp38-cp38-win32.whl", hash = "sha256:50ac2184ffb1b999e11e27c7e3e70cc1139047e7ebc1aa95ed12f4269abe98d4"}, + {file = "xxhash-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:392f52ebbb932db566973693de48f15ce787cabd15cf6334e855ed22ea0be5b3"}, + {file = "xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301"}, + {file = "xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606"}, + {file = "xxhash-3.5.0-cp39-cp39-win32.whl", hash = "sha256:5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4"}, + {file = "xxhash-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558"}, + {file = "xxhash-3.5.0-cp39-cp39-win_arm64.whl", hash = "sha256:a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b4154c00eb22e4d543f472cfca430e7962a0f1d0f3778334f2e08a7ba59363c"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d30bbc1644f726b825b3278764240f449d75f1a8bdda892e641d4a688b1494ae"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa0b72f2423e2aa53077e54a61c28e181d23effeaafd73fcb9c494e60930c8e"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13de2b76c1835399b2e419a296d5b38dc4855385d9e96916299170085ef72f57"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0691bfcc4f9c656bcb96cc5db94b4d75980b9d5589f2e59de790091028580837"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:297595fe6138d4da2c8ce9e72a04d73e58725bb60f3a19048bc96ab2ff31c692"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1276d369452040cbb943300dc8abeedab14245ea44056a2943183822513a18"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2061188a1ba352fc699c82bff722f4baacb4b4b8b2f0c745d2001e56d0dfb514"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c384c434021e4f62b8d9ba0bc9467e14d394893077e2c66d826243025e1f81"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e6a4dd644d72ab316b580a1c120b375890e4c52ec392d4aef3c63361ec4d77d1"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240"}, + {file = "xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f"}, ] [[package]] name = "yarl" -version = "1.9.4" +version = "1.18.3" description = "Yet another URL library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, + {file = 
"yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, + {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, + {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, + {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = 
"sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, + {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" +propcache = ">=0.2.0" [[package]] name = "zipp" -version = "3.19.2" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", 
hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" -content-hash = "850edd56328727c4ff8ebf9b6642b37ff645c866d2406beb21cbaf6d64332ebf" +content-hash = "509cb4e31fdb6eb2002be3dc8cdaec2a9fd750c7b78bcbae94eb911edaf7a7de" From 177a96c325104a15df8bb84dbc38d9983cfb776c Mon Sep 17 00:00:00 2001 From: fm1320 Date: Tue, 10 Dec 2024 15:09:54 +0000 Subject: [PATCH 33/40] trailing whitespace fix --- adalflow/poetry.lock | 2928 +++++++++++++++++++++------------------ adalflow/pyproject.toml | 5 + 2 files changed, 1595 insertions(+), 1338 deletions(-) diff --git a/adalflow/poetry.lock b/adalflow/poetry.lock index d2ee1af5..bac6b6cc 100644 --- a/adalflow/poetry.lock +++ b/adalflow/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "absl-py" @@ -13,123 +13,109 @@ files = [ [[package]] name = "aiohappyeyeballs" -version = "2.4.3" +version = "2.4.4" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, - {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, + {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, + {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, ] [[package]] name = "aiohttp" -version = "3.10.9" +version = "3.11.10" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "aiohttp-3.10.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8b3fb28a9ac8f2558760d8e637dbf27aef1e8b7f1d221e8669a1074d1a266bb2"}, - {file = "aiohttp-3.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91aa966858593f64c8a65cdefa3d6dc8fe3c2768b159da84c1ddbbb2c01ab4ef"}, - {file = "aiohttp-3.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63649309da83277f06a15bbdc2a54fbe75efb92caa2c25bb57ca37762789c746"}, - {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e7fabedb3fe06933f47f1538df7b3a8d78e13d7167195f51ca47ee12690373"}, - {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c070430fda1a550a1c3a4c2d7281d3b8cfc0c6715f616e40e3332201a253067"}, - {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:51d0a4901b27272ae54e42067bc4b9a90e619a690b4dc43ea5950eb3070afc32"}, - {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fec5fac7aea6c060f317f07494961236434928e6f4374e170ef50b3001e14581"}, - {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:172ad884bb61ad31ed7beed8be776eb17e7fb423f1c1be836d5cb357a096bf12"}, - {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d646fdd74c25bbdd4a055414f0fe32896c400f38ffbdfc78c68e62812a9e0257"}, - {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e86260b76786c28acf0b5fe31c8dca4c2add95098c709b11e8c35b424ebd4f5b"}, - {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d7cafc11d70fdd8801abfc2ff276744ae4cb39d8060b6b542c7e44e5f2cfc2"}, - {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc262c3df78c8ff6020c782d9ce02e4bcffe4900ad71c0ecdad59943cba54442"}, - {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:482c85cf3d429844396d939b22bc2a03849cb9ad33344689ad1c85697bcba33a"}, - {file = "aiohttp-3.10.9-cp310-cp310-win32.whl", hash = "sha256:aeebd3061f6f1747c011e1d0b0b5f04f9f54ad1a2ca183e687e7277bef2e0da2"}, - {file = "aiohttp-3.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:fa430b871220dc62572cef9c69b41e0d70fcb9d486a4a207a5de4c1f25d82593"}, - {file = "aiohttp-3.10.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:16e6a51d8bc96b77f04a6764b4ad03eeef43baa32014fce71e882bd71302c7e4"}, - {file = "aiohttp-3.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8bd9125dd0cc8ebd84bff2be64b10fdba7dc6fd7be431b5eaf67723557de3a31"}, - {file = "aiohttp-3.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dcf354661f54e6a49193d0b5653a1b011ba856e0b7a76bda2c33e4c6892f34ea"}, - {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42775de0ca04f90c10c5c46291535ec08e9bcc4756f1b48f02a0657febe89b10"}, - {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d1e4185c5d7187684d41ebb50c9aeaaaa06ca1875f4c57593071b0409d2444"}, - {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2695c61cf53a5d4345a43d689f37fc0f6d3a2dc520660aec27ec0f06288d1f9"}, - {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a3f063b41cc06e8d0b3fcbbfc9c05b7420f41287e0cd4f75ce0a1f3d80729e6"}, - {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d37f4718002863b82c6f391c8efd4d3a817da37030a29e2682a94d2716209de"}, - {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2746d8994ebca1bdc55a1e998feff4e94222da709623bb18f6e5cfec8ec01baf"}, - {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6f3c6648aa123bcd73d6f26607d59967b607b0da8ffcc27d418a4b59f4c98c7c"}, - {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:558b3d223fd631ad134d89adea876e7fdb4c93c849ef195049c063ada82b7d08"}, - {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4e6cb75f8ddd9c2132d00bc03c9716add57f4beff1263463724f6398b813e7eb"}, - {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:608cecd8d58d285bfd52dbca5b6251ca8d6ea567022c8a0eaae03c2589cd9af9"}, - {file = 
"aiohttp-3.10.9-cp311-cp311-win32.whl", hash = "sha256:36d4fba838be5f083f5490ddd281813b44d69685db910907636bc5dca6322316"}, - {file = "aiohttp-3.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:8be1a65487bdfc285bd5e9baf3208c2132ca92a9b4020e9f27df1b16fab998a9"}, - {file = "aiohttp-3.10.9-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4fd16b30567c5b8e167923be6e027eeae0f20cf2b8a26b98a25115f28ad48ee0"}, - {file = "aiohttp-3.10.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:40ff5b7660f903dc587ed36ef08a88d46840182d9d4b5694e7607877ced698a1"}, - {file = "aiohttp-3.10.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4edc3fd701e2b9a0d605a7b23d3de4ad23137d23fc0dbab726aa71d92f11aaaf"}, - {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e525b69ee8a92c146ae5b4da9ecd15e518df4d40003b01b454ad694a27f498b5"}, - {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5002a02c17fcfd796d20bac719981d2fca9c006aac0797eb8f430a58e9d12431"}, - {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4ceeae2fb8cabdd1b71c82bfdd39662473d3433ec95b962200e9e752fb70d0"}, - {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e395c3d1f773cf0651cd3559e25182eb0c03a2777b53b4575d8adc1149c6e9"}, - {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbdb8def5268f3f9cd753a265756f49228a20ed14a480d151df727808b4531dd"}, - {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f82ace0ec57c94aaf5b0e118d4366cff5889097412c75aa14b4fd5fc0c44ee3e"}, - {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6ebdc3b3714afe1b134b3bbeb5f745eed3ecbcff92ab25d80e4ef299e83a5465"}, - {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f9ca09414003c0e96a735daa1f071f7d7ed06962ef4fa29ceb6c80d06696d900"}, - {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1298b854fd31d0567cbb916091be9d3278168064fca88e70b8468875ef9ff7e7"}, - {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:60ad5b8a7452c0f5645c73d4dad7490afd6119d453d302cd5b72b678a85d6044"}, - {file = "aiohttp-3.10.9-cp312-cp312-win32.whl", hash = "sha256:1a0ee6c0d590c917f1b9629371fce5f3d3f22c317aa96fbdcce3260754d7ea21"}, - {file = "aiohttp-3.10.9-cp312-cp312-win_amd64.whl", hash = "sha256:c46131c6112b534b178d4e002abe450a0a29840b61413ac25243f1291613806a"}, - {file = "aiohttp-3.10.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2bd9f3eac515c16c4360a6a00c38119333901b8590fe93c3257a9b536026594d"}, - {file = "aiohttp-3.10.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8cc0d13b4e3b1362d424ce3f4e8c79e1f7247a00d792823ffd640878abf28e56"}, - {file = "aiohttp-3.10.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ba1a599255ad6a41022e261e31bc2f6f9355a419575b391f9655c4d9e5df5ff5"}, - {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:776e9f3c9b377fcf097c4a04b241b15691e6662d850168642ff976780609303c"}, - {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8debb45545ad95b58cc16c3c1cc19ad82cffcb106db12b437885dbee265f0ab5"}, - {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2555e4949c8d8782f18ef20e9d39730d2656e218a6f1a21a4c4c0b56546a02e"}, - 
{file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c54dc329cd44f7f7883a9f4baaefe686e8b9662e2c6c184ea15cceee587d8d69"}, - {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e709d6ac598c5416f879bb1bae3fd751366120ac3fa235a01de763537385d036"}, - {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:17c272cfe7b07a5bb0c6ad3f234e0c336fb53f3bf17840f66bd77b5815ab3d16"}, - {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0c21c82df33b264216abffff9f8370f303dab65d8eee3767efbbd2734363f677"}, - {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9331dd34145ff105177855017920dde140b447049cd62bb589de320fd6ddd582"}, - {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ac3196952c673822ebed8871cf8802e17254fff2a2ed4835d9c045d9b88c5ec7"}, - {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2c33fa6e10bb7ed262e3ff03cc69d52869514f16558db0626a7c5c61dde3c29f"}, - {file = "aiohttp-3.10.9-cp313-cp313-win32.whl", hash = "sha256:a14e4b672c257a6b94fe934ee62666bacbc8e45b7876f9dd9502d0f0fe69db16"}, - {file = "aiohttp-3.10.9-cp313-cp313-win_amd64.whl", hash = "sha256:a35ed3d03910785f7d9d6f5381f0c24002b2b888b298e6f941b2fc94c5055fcd"}, - {file = "aiohttp-3.10.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f392ef50e22c31fa49b5a46af7f983fa3f118f3eccb8522063bee8bfa6755f8"}, - {file = "aiohttp-3.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d1f5c9169e26db6a61276008582d945405b8316aae2bb198220466e68114a0f5"}, - {file = "aiohttp-3.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8d9d10d10ec27c0d46ddaecc3c5598c4db9ce4e6398ca872cdde0525765caa2f"}, - {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d97273a52d7f89a75b11ec386f786d3da7723d7efae3034b4dda79f6f093edc1"}, - {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d271f770b52e32236d945911b2082f9318e90ff835d45224fa9e28374303f729"}, - {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7003f33f5f7da1eb02f0446b0f8d2ccf57d253ca6c2e7a5732d25889da82b517"}, - {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6e00c8a92e7663ed2be6fcc08a2997ff06ce73c8080cd0df10cc0321a3168d7"}, - {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a61df62966ce6507aafab24e124e0c3a1cfbe23c59732987fc0fd0d71daa0b88"}, - {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:60555211a006d26e1a389222e3fab8cd379f28e0fbf7472ee55b16c6c529e3a6"}, - {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d15a29424e96fad56dc2f3abed10a89c50c099f97d2416520c7a543e8fddf066"}, - {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:a19caae0d670771ea7854ca30df76f676eb47e0fd9b2ee4392d44708f272122d"}, - {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:99f9678bf0e2b1b695e8028fedac24ab6770937932eda695815d5a6618c37e04"}, - {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2914caa46054f3b5ff910468d686742ff8cff54b8a67319d75f5d5945fd0a13d"}, - {file = "aiohttp-3.10.9-cp38-cp38-win32.whl", hash = "sha256:0bc059ecbce835630e635879f5f480a742e130d9821fbe3d2f76610a6698ee25"}, - {file 
= "aiohttp-3.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:e883b61b75ca6efc2541fcd52a5c8ccfe288b24d97e20ac08fdf343b8ac672ea"}, - {file = "aiohttp-3.10.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fcd546782d03181b0b1d20b43d612429a90a68779659ba8045114b867971ab71"}, - {file = "aiohttp-3.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:85711eec2d875cd88c7eb40e734c4ca6d9ae477d6f26bd2b5bb4f7f60e41b156"}, - {file = "aiohttp-3.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02d1d6610588bcd743fae827bd6f2e47e0d09b346f230824b4c6fb85c6065f9c"}, - {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3668d0c2a4d23fb136a753eba42caa2c0abbd3d9c5c87ee150a716a16c6deec1"}, - {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7c071235a47d407b0e93aa6262b49422dbe48d7d8566e1158fecc91043dd948"}, - {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac74e794e3aee92ae8f571bfeaa103a141e409863a100ab63a253b1c53b707eb"}, - {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bbf94d4a0447705b7775417ca8bb8086cc5482023a6e17cdc8f96d0b1b5aba6"}, - {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb0b2d5d51f96b6cc19e6ab46a7b684be23240426ae951dcdac9639ab111b45e"}, - {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e83dfefb4f7d285c2d6a07a22268344a97d61579b3e0dce482a5be0251d672ab"}, - {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f0a44bb40b6aaa4fb9a5c1ee07880570ecda2065433a96ccff409c9c20c1624a"}, - {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c2b627d3c8982691b06d89d31093cee158c30629fdfebe705a91814d49b554f8"}, - {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:03690541e4cc866eef79626cfa1ef4dd729c5c1408600c8cb9e12e1137eed6ab"}, - {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad3675c126f2a95bde637d162f8231cff6bc0bc9fbe31bd78075f9ff7921e322"}, - {file = "aiohttp-3.10.9-cp39-cp39-win32.whl", hash = "sha256:1321658f12b6caffafdc35cfba6c882cb014af86bef4e78c125e7e794dfb927b"}, - {file = "aiohttp-3.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:9fdf5c839bf95fc67be5794c780419edb0dbef776edcfc6c2e5e2ffd5ee755fa"}, - {file = "aiohttp-3.10.9.tar.gz", hash = "sha256:143b0026a9dab07a05ad2dd9e46aa859bffdd6348ddc5967b42161168c24f857"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f"}, + {file = 
"aiohttp-3.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d"}, + {file = "aiohttp-3.11.10-cp310-cp310-win32.whl", hash = "sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91"}, + {file = "aiohttp-3.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3"}, + {file = "aiohttp-3.11.10-cp311-cp311-win32.whl", hash = 
"sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4"}, + {file = "aiohttp-3.11.10-cp311-cp311-win_amd64.whl", hash = "sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc"}, + {file = "aiohttp-3.11.10-cp312-cp312-win32.whl", hash = "sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985"}, + {file = "aiohttp-3.11.10-cp312-cp312-win_amd64.whl", hash = "sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408"}, + {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816"}, + {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf"}, + {file = "aiohttp-3.11.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34"}, + {file = 
"aiohttp-3.11.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836"}, + {file = "aiohttp-3.11.10-cp313-cp313-win32.whl", hash = "sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c"}, + {file = "aiohttp-3.11.10-cp313-cp313-win_amd64.whl", hash = "sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4"}, + {file = "aiohttp-3.11.10-cp39-cp39-win32.whl", hash = 
"sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be"}, + {file = "aiohttp-3.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74"}, + {file = "aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e"}, ] [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.12.0,<2.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] @@ -186,35 +172,35 @@ vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" -version = "4.6.0" +version = "4.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, - {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] @@ -238,13 +224,13 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "azure-core" -version = "1.31.0" +version = "1.32.0" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_core-1.31.0-py3-none-any.whl", hash = 
"sha256:22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd"}, - {file = "azure_core-1.31.0.tar.gz", hash = "sha256:656a0dd61e1869b1506b7c6a3b31d62f15984b1a573d6326f6aa2f3e4123284b"}, + {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, + {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, ] [package.dependencies] @@ -257,13 +243,13 @@ aio = ["aiohttp (>=3.0)"] [[package]] name = "azure-identity" -version = "1.18.0" +version = "1.19.0" description = "Microsoft Azure Identity Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_identity-1.18.0-py3-none-any.whl", hash = "sha256:bccf6106245b49ff41d0c4cd7b72851c5a2ba3a32cef7589da246f5727f26f02"}, - {file = "azure_identity-1.18.0.tar.gz", hash = "sha256:f567579a65d8932fa913c76eddf3305101a15e5727a5e4aa5df649a0f553d4c3"}, + {file = "azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81"}, + {file = "azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83"}, ] [package.dependencies] @@ -286,17 +272,17 @@ files = [ [[package]] name = "boto3" -version = "1.35.34" +version = "1.35.77" description = "The AWS SDK for Python" optional = true python-versions = ">=3.8" files = [ - {file = "boto3-1.35.34-py3-none-any.whl", hash = "sha256:291e7b97a34967ed93297e6171f1bebb8529e64633dd48426760e3fdef1cdea8"}, - {file = "boto3-1.35.34.tar.gz", hash = "sha256:57e6ee8504e7929bc094bb2afc879943906064179a1e88c23b4812e2c6f61532"}, + {file = "boto3-1.35.77-py3-none-any.whl", hash = "sha256:a09871805f8e462349a1c33c23eb413668df0bf68424e61d53518e1a7d883b2f"}, + {file = "boto3-1.35.77.tar.gz", hash = "sha256:cc819cdbccbc2d0dc185f1dcfe74cf3809489c4cae63c2e5d6a557aa0c5ab928"}, ] [package.dependencies] -botocore = ">=1.35.34,<1.36.0" +botocore = ">=1.35.77,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -305,13 +291,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.34" +version = "1.35.77" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.34-py3-none-any.whl", hash = "sha256:ccb0fe397b11b81c9abc0c87029d17298e17bf658d8db5c0c5a551a12a207e7a"}, - {file = "botocore-1.35.34.tar.gz", hash = "sha256:789b6501a3bb4a9591c1fe10da200cc315c1fa5df5ada19c720d8ef06439b3e3"}, + {file = "botocore-1.35.77-py3-none-any.whl", hash = "sha256:3faa27d65841499762228902d7e215fa99a4c2fdc76c9113e1c3f339bdf685b8"}, + {file = "botocore-1.35.77.tar.gz", hash = "sha256:17b778016644e9342ca3ff2f430c1d1db0c6126e9b41a57cff52ac58e7a455e0"}, ] [package.dependencies] @@ -439,119 +425,148 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "cohere" -version = "5.10.0" +version = "5.13.3" description = "" optional = true -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "cohere-5.10.0-py3-none-any.whl", hash = "sha256:46e50e3e8514a99cf77b4c022c8077a6205fba948051c33087ddeb66ec706f0a"}, - {file = "cohere-5.10.0.tar.gz", hash = "sha256:21020a7ae4c30f72991ef91566a926a9d7d1485d7abeed7bfa2bd6f35ea34783"}, + {file = "cohere-5.13.3-py3-none-any.whl", hash = "sha256:076c88fdd3d670b6577eb8e813a9072bf18b59648d4092c6f0263af3c27bf81f"}, + {file = "cohere-5.13.3.tar.gz", hash = "sha256:70d87e0d5ce48aaee5ba70ead5efbade226cb2a4b11bfcfb676f6a2db3642819"}, ] [package.dependencies] -boto3 = ">=1.34.0,<2.0.0" fastavro = ">=1.9.4,<2.0.0" httpx = ">=0.21.2" httpx-sse = "0.4.0" +numpy = ">=1.26,<2.0" parameterized = ">=0.9.0,<0.10.0" pydantic = ">=1.9.2" pydantic-core = ">=2.18.2,<3.0.0" @@ -560,6 +575,9 @@ tokenizers = ">=0.15,<1" types-requests = ">=2.0.0,<3.0.0" typing_extensions = ">=4.0.0" +[package.extras] +aws = ["boto3 (>=1.34.0,<2.0.0)", "sagemaker (>=2.232.1,<3.0.0)"] + [[package]] name = "colorama" version = "0.4.6" @@ -657,38 +675,38 @@ test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist" [[package]] name = "cryptography" -version = "43.0.1" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -701,7 +719,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -763,6 +781,31 @@ tests-numpy2 = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0 torch = ["torch"] vision = ["Pillow (>=9.4.0)"] +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + [[package]] name = "dill" version = "0.3.8" @@ -791,13 +834,13 @@ files = [ [[package]] name = "distlib" -version = "0.3.8" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] @@ -827,41 +870,41 @@ 
test = ["pytest (>=6)"] [[package]] name = "faiss-cpu" -version = "1.8.0.post1" +version = "1.9.0.post1" description = "A library for efficient similarity search and clustering of dense vectors." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:fd84721eb599aa1da19b1b36345bb8705a60bb1d2887bbbc395a29e3d36a1a62"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b78ff9079d15fd0f156bf5dd8a2975a8abffac1854a86ece263eec1500a2e836"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de25c943d1789e35fe06a20884c88cd32aedbb1a33bb8da2238cdea7bd9633f"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adae0f1b144e7216da696f14bc4991ca4300c94baaa59247c3d322588e661c95"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:00345290680a444a4b4cb2d98a3844bb5c401a2160fee547c7631d759fd2ec3e"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:8d4bade10cb63e9f9ff261751edd7eb097b1f4bf30be4d0d25d6f688559d795e"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20bd43eca3b7d77e71ea56b7a558cc28e900d8abff417eb285e2d92e95d934d4"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8542a87743a7f94ac656fd3e9592ad57e58b04d961ad2fe654a22a8ca59defdb"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed46928de3dc20170b10fec89c54075a11383c2aaf4f119c63e0f6ae5a507d74"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:4fa5fc8ea210b919aa469e27d6687e50052db906e7fec3f2257178b1384fa18b"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:96aec0d08a3099883af3a9b6356cfe736e8bd879318a940a27e9d1ae6f33d788"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:92b06147fa84732ecdc965922e8ef50dc7011ef8be65821ff4abb2118cb5dce0"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:709ef9394d1148aef70dbe890edbde8c282a4a2e06a8b69ab64f65e90f5ba572"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:327a9c30971bf72cd8392b15eb4aff5d898c453212eae656dfaa3ba555b9ca0c"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-win_amd64.whl", hash = "sha256:8756f1d93faba56349883fa2f5d47fe36bb2f11f789200c6b1c691ef805485f2"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f4a3045909c447bf1955b70083891e80f2c87c5427f20cae25245e08ec5c9e52"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8842b7fc921ca1fafdb0845f2ba029e79df04eebae72ab135239f93478a9b7a2"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d5a9799634e32c3862d5436d1e78112ed9a38f319e4523f5916e55d86adda8f"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a70923b0fbbb40f647e20bcbcbfd472277e6d84bb23ff12d2a94b6841806b55"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-win_amd64.whl", hash = "sha256:ce652df3c4dd50c88ac9235d072f30ce60694dc422c5f523bbbcab320e8f3097"}, - {file = 
"faiss_cpu-1.8.0.post1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:83ef04b17b19189dd6601a941bdf4bfa9de0740dbcd80305aeba51a1b1955f80"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c50c8697077470ede7f1939ef8dc8a846ec19cf1893b543f6b67f9af03b0a122"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ce428a7a67fe5c64047280e5e12a8dbdecf7002f9d127b26cf1db354e9fe76"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3b36b80380bae523e3198cfb4a137867055945ce7bf10d18fe9f0284f2fb47"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:4fcc67a2353f08a20c1ab955de3cde14ef3b447761b26244a5aa849c15cbc9b3"}, - {file = "faiss_cpu-1.8.0.post1.tar.gz", hash = "sha256:5686af34414678c3d49c4fa8d774df7156e9cb48d7029071e56230e74b01cc13"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e18602465f5a96c3c973ab440f9263a0881034fb54810be20bc8cdb8b069456d"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5dddeecdb68fb95b4a3343a6ff89498fd7c222726706538f360132bfe3d8aebe"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d2d7e522e6d55dbf14e57fcac1d38d62c95479b847562004f9e7c97c139ee8"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ffbbb1ec9ae503df1fcdfd5c3a8594d8b76fb4b8ebf0a697c1492f1f9cec1a"}, + {file = "faiss_cpu-1.9.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:29cae0dfa6c286c043d45572a39288f5a56ffb694a20a90c6946018241002d90"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:7ef0c81a798a64fc932e15d560ddc01021df9ed70b678367aec6e01f39d075c1"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:783f545c3999909164a975b97d99749b244b62651ce976ee76b8a171c62e827d"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0408261ed85d0bd8e30716a3fd441d0c51a5563cf3a795a488eab9c492ea33"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7068e14e8f557659c68bdf4d511571630721e1502efa87a70fe44023f3741645"}, + {file = "faiss_cpu-1.9.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:274a66868a498687641faf964f6eddbe70ccb5bee56239862ee0aa079415779e"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:ae3fbe0f26c05bef26c626f9e293cc4dd0e685ec02d64100c686276a8c14bf88"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b4d5e79643a09d91d339ba7609fb2e9b3ce6de3cd069b9183e97a843261e0e8"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd1a0412528202e4a4cc38953f81bb7d9b9a783881fa06d822b717a1b090bdd"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4a499aa20b00266c78b9768de962e6a8dd2e2b2eb3d02aa4c41af4c6913eeba"}, + {file = "faiss_cpu-1.9.0.post1-cp312-cp312-win_amd64.whl", hash = "sha256:d6920f2db8581eb6dcd519c024120061d7d68bc075d494e59b1b2af9a1729d03"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:10e38642c5f147642c4aa8a6c1704fb1900b2b8dd5f33b49a45fa5a67df4837d"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-macosx_11_0_arm64.whl", 
hash = "sha256:ec25338fc06fa8aa6ef5c7a2ba9f1aa03f64f9b38ba82402a6495cc981426571"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2951be3d2713a128e7f625a4b508419238b6c09cce747a0de7708bdcf1b7e3d6"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6467aafa148d39e6e9bc26c1d84e07f16cbf910297a90ec2e8597cf69772a82"}, + {file = "faiss_cpu-1.9.0.post1-cp313-cp313-win_amd64.whl", hash = "sha256:87a224a01a4ad80e0f849b2b2b1fba8b197e5803416ea861faf1b0de255871ea"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a88ebc13a6827b2520eddd5f1c948f377c34cc07571ce6e4c0e6af0696a77288"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0241d7441c291a64a26f8ae3d9c23adccb392524a4a5555b32aff08c7c1766f2"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aafa02b77e9c94b858cf86bc69bfa72a3754b5cfe8a0e9c1c70c6cf5c8c6b0a6"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba6e57971d7b112eb372d805a809b36573f50c10a08a7ecc97e4039ec369a1f6"}, + {file = "faiss_cpu-1.9.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:b4eeb44949805d4a88de507636b01382da0527280a64ecb99bc4eb596a1a81e5"}, + {file = "faiss_cpu-1.9.0.post1.tar.gz", hash = "sha256:920725d485aab05dd87d34ef63257332441e9b53d382069f034996465827143a"}, ] [package.dependencies] -numpy = ">=1.0,<2.0" +numpy = ">=1.25.0,<3.0" packaging = "*" [[package]] @@ -928,59 +971,61 @@ typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "fonttools" -version = "4.54.1" +version = "4.55.2" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"}, - {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"}, - {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"}, - {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"}, - {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"}, - {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"}, - {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"}, - {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"}, - {file = "fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"}, - {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"}, - {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"}, - {file = 
"fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"}, - {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"}, - {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"}, - {file = "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"}, - {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"}, - {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"}, - {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"}, - {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"}, - {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"}, - {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"}, - {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"}, - {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"}, - {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"}, - {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"}, - {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"}, - {file = "fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"}, - {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"}, - {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"}, - {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"}, - {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"}, - {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"}, - {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"}, - {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"}, - {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"}, - {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"}, - {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"}, - {file = "fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"}, - {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"}, - {file = "fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"}, - {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"}, - {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"}, - {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"}, - {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"}, - {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"}, - {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"}, - {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"}, - {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"}, + {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bef0f8603834643b1a6419d57902f18e7d950ec1a998fb70410635c598dc1a1e"}, + {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:944228b86d472612d3b48bcc83b31c25c2271e63fdc74539adfcfa7a96d487fb"}, + {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f0e55f5da594b85f269cfbecd2f6bd3e07d0abba68870bc3f34854de4fa4678"}, + {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b1a6e576db0c83c1b91925bf1363478c4bb968dbe8433147332fb5782ce6190"}, + {file = "fonttools-4.55.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:616368b15716781bc84df5c2191dc0540137aaef56c2771eb4b89b90933f347a"}, + {file = "fonttools-4.55.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bbae4f3915225c2c37670da68e2bf18a21206060ad31dfb95fec91ef641caa7"}, + {file = "fonttools-4.55.2-cp310-cp310-win32.whl", hash = "sha256:8b02b10648d69d67a7eb055f4d3eedf4a85deb22fb7a19fbd9acbae7c7538199"}, + {file = "fonttools-4.55.2-cp310-cp310-win_amd64.whl", hash = "sha256:bbea0ab841113ac8e8edde067e099b7288ffc6ac2dded538b131c2c0595d5f77"}, + {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d34525e8141286fa976e14806639d32294bfb38d28bbdb5f6be9f46a1cd695a6"}, + {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:0ecd1c2b1c2ec46bb73685bc5473c72e16ed0930ef79bc2919ccadc43a99fb16"}, + {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9008438ad59e5a8e403a62fbefef2b2ff377eb3857d90a3f2a5f4d674ff441b2"}, + {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:131591ac8d7a47043aaf29581aba755ae151d46e49d2bf49608601efd71e8b4d"}, + {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4c83381c3e3e3d9caa25527c4300543578341f21aae89e4fbbb4debdda8d82a2"}, + {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42aca564b575252fd9954ed0d91d97a24de24289a16ce8ff74ed0bdf5ecebf11"}, + {file = "fonttools-4.55.2-cp311-cp311-win32.whl", hash = "sha256:c6457f650ebe15baa17fc06e256227f0a47f46f80f27ec5a0b00160de8dc2c13"}, + {file = "fonttools-4.55.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cfa67414d7414442a5635ff634384101c54f53bb7b0e04aa6a61b013fcce194"}, + {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:18f082445b8fe5e91c53e6184f4c1c73f3f965c8bcc614c6cd6effd573ce6c1a"}, + {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c0f91adbbd706e8acd1db73e3e510118e62d0ffb651864567dccc5b2339f90"}, + {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d8ccce035320d63dba0c35f52499322f5531dbe85bba1514c7cea26297e4c54"}, + {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96e126df9615df214ec7f04bebcf60076297fbc10b75c777ce58b702d7708ffb"}, + {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:508ebb42956a7a931c4092dfa2d9b4ffd4f94cea09b8211199090d2bd082506b"}, + {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1b9de46ef7b683d50400abf9f1578eaceee271ff51c36bf4b7366f2be29f498"}, + {file = "fonttools-4.55.2-cp312-cp312-win32.whl", hash = "sha256:2df61d9fc15199cc86dad29f64dd686874a3a52dda0c2d8597d21f509f95c332"}, + {file = "fonttools-4.55.2-cp312-cp312-win_amd64.whl", hash = "sha256:d337ec087da8216a828574aa0525d869df0a2ac217a2efc1890974ddd1fbc5b9"}, + {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:10aff204e2edee1d312fa595c06f201adf8d528a3b659cfb34cd47eceaaa6a26"}, + {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09fe922a3eff181fd07dd724cdb441fb6b9fc355fd1c0f1aa79aca60faf1fbdd"}, + {file = "fonttools-4.55.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487e1e8b524143a799bda0169c48b44a23a6027c1bb1957d5a172a7d3a1dd704"}, + {file = "fonttools-4.55.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b1726872e09268bbedb14dc02e58b7ea31ecdd1204c6073eda4911746b44797"}, + {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fc88cfb58b0cd7b48718c3e61dd0d0a3ee8e2c86b973342967ce09fbf1db6d4"}, + {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e857fe1859901ad8c5cab32e0eebc920adb09f413d2d73b74b677cf47b28590c"}, + {file = "fonttools-4.55.2-cp313-cp313-win32.whl", hash = "sha256:81ccd2b3a420b8050c7d9db3be0555d71662973b3ef2a1d921a2880b58957db8"}, + {file = "fonttools-4.55.2-cp313-cp313-win_amd64.whl", hash = "sha256:d559eb1744c7dcfa90ae60cb1a4b3595e898e48f4198738c321468c01180cd83"}, + {file = 
"fonttools-4.55.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6b5917ef79cac8300b88fd6113003fd01bbbbea2ea060a27b95d8f77cb4c65c2"}, + {file = "fonttools-4.55.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:663eba5615d6abaaf616432354eb7ce951d518e43404371bcc2b0694ef21e8d6"}, + {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:803d5cef5fc47f44f5084d154aa3d6f069bb1b60e32390c225f897fa19b0f939"}, + {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bc5f100de0173cc39102c0399bd6c3bd544bbdf224957933f10ee442d43cddd"}, + {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3d9bbc1e380fdaf04ad9eabd8e3e6a4301eaf3487940893e9fd98537ea2e283b"}, + {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:42a9afedff07b6f75aa0f39b5e49922ac764580ef3efce035ca30284b2ee65c8"}, + {file = "fonttools-4.55.2-cp38-cp38-win32.whl", hash = "sha256:f1c76f423f1a241df08f87614364dff6e0b7ce23c962c1b74bd995ec7c0dad13"}, + {file = "fonttools-4.55.2-cp38-cp38-win_amd64.whl", hash = "sha256:25062b6ca03464dd5179fc2040fb19e03391b7cc49b9cc4f879312e638605c5c"}, + {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d1100d8e665fe386a79cab59446992de881ea74d0d6c191bb988642692aa2421"}, + {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbdc251c5e472e5ae6bc816f9b82718b8e93ff7992e7331d6cf3562b96aa268e"}, + {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0bf24d2b02dbc9376d795a63062632ff73e3e9e60c0229373f500aed7e86dd7"}, + {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ff250ed4ff05015dfd9cf2adf7570c7a383ca80f4d9732ac484a5ed0d8453c"}, + {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44cf2a98aa661dbdeb8c03f5e405b074e2935196780bb729888639f5276067d9"}, + {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22ef222740eb89d189bf0612eb98fbae592c61d7efeac51bfbc2a1592d469557"}, + {file = "fonttools-4.55.2-cp39-cp39-win32.whl", hash = "sha256:93f439ca27e55f585e7aaa04a74990acd983b5f2245e41d6b79f0a8b44e684d8"}, + {file = "fonttools-4.55.2-cp39-cp39-win_amd64.whl", hash = "sha256:627cf10d6f5af5bec6324c18a2670f134c29e1b7dce3fb62e8ef88baa6cba7a9"}, + {file = "fonttools-4.55.2-py3-none-any.whl", hash = "sha256:8e2d89fbe9b08d96e22c7a81ec04a4e8d8439c31223e2dc6f2f9fc8ff14bdf9f"}, + {file = "fonttools-4.55.2.tar.gz", hash = "sha256:45947e7b3f9673f91df125d375eb57b9a23f2a603f438a1aebf3171bffa7a205"}, ] [package.extras] @@ -999,88 +1044,103 @@ woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "frozenlist" -version = "1.4.1" +version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = 
"frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = 
"frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = 
"frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = 
"sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, + {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, + {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = 
"frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, ] [[package]] @@ -1144,13 +1204,13 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-api-core" -version = "2.20.0" +version = "2.24.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, - {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, + {file = "google_api_core-2.24.0-py3-none-any.whl", hash = "sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9"}, + {file = "google_api_core-2.24.0.tar.gz", hash = "sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf"}, ] [package.dependencies] @@ -1164,24 +1224,28 @@ grpcio-status = [ {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] -proto-plus = ">=1.22.3,<2.0.0dev" +proto-plus = [ + {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""}, + {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""}, +] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = 
"google-api-python-client" -version = "2.147.0" +version = "2.154.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.147.0-py2.py3-none-any.whl", hash = "sha256:c6ecfa193c695baa41e84562d8f8f244fcd164419eca3fc9fd7565646668f9b2"}, - {file = "google_api_python_client-2.147.0.tar.gz", hash = "sha256:e864c2cf61d34c00f05278b8bdb72b93b6fa34f0de9ead51d20435f3b65f91be"}, + {file = "google_api_python_client-2.154.0-py2.py3-none-any.whl", hash = "sha256:a521bbbb2ec0ba9d6f307cdd64ed6e21eeac372d1bd7493a4ab5022941f784ad"}, + {file = "google_api_python_client-2.154.0.tar.gz", hash = "sha256:1b420062e03bfcaa1c79e2e00a612d29a6a934151ceb3d272fe150a656dc8f17"}, ] [package.dependencies] @@ -1193,13 +1257,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.35.0" +version = "2.36.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, - {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, + {file = "google_auth-2.36.0-py2.py3-none-any.whl", hash = "sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb"}, + {file = "google_auth-2.36.0.tar.gz", hash = "sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1"}, ] [package.dependencies] @@ -1254,13 +1318,13 @@ dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "py [[package]] name = "googleapis-common-protos" -version = "1.65.0" +version = "1.66.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, - {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, + {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, + {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, ] [package.dependencies] @@ -1392,70 +1456,70 @@ typing-extensions = ">=4.7,<5" [[package]] name = "grpcio" -version = "1.66.2" +version = "1.68.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, - {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, - {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, - {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, - {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, - {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, - {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, - {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, - {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, - {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, - {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, - {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, - {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, - {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = 
"sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, - {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, - {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, - {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, - {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, - {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, - {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, - {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, - {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, - {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, - {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, - {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, + {file = "grpcio-1.68.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d"}, + {file = "grpcio-1.68.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330"}, + {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079"}, + {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1"}, + {file = "grpcio-1.68.1-cp310-cp310-win32.whl", hash = "sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5"}, + {file = "grpcio-1.68.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746"}, + {file = "grpcio-1.68.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c"}, + {file = "grpcio-1.68.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600"}, + {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe"}, + {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0"}, + {file = "grpcio-1.68.1-cp311-cp311-win32.whl", hash = "sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9"}, + {file = "grpcio-1.68.1-cp311-cp311-win_amd64.whl", hash = "sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2"}, + {file = "grpcio-1.68.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666"}, + {file = "grpcio-1.68.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60"}, + {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475"}, + {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613"}, + {file = "grpcio-1.68.1-cp312-cp312-win32.whl", hash = "sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5"}, + {file = "grpcio-1.68.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c"}, + {file = "grpcio-1.68.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385"}, + {file = "grpcio-1.68.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a"}, + {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161"}, + {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad"}, + {file = "grpcio-1.68.1-cp313-cp313-win32.whl", hash = "sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172"}, + {file = "grpcio-1.68.1-cp313-cp313-win_amd64.whl", hash = "sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e"}, + {file = "grpcio-1.68.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad"}, + {file = "grpcio-1.68.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e"}, + {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c"}, + {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded"}, + {file = "grpcio-1.68.1-cp38-cp38-win32.whl", hash = "sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63"}, + {file = "grpcio-1.68.1-cp38-cp38-win_amd64.whl", hash = "sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d"}, + {file = "grpcio-1.68.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a"}, + {file = "grpcio-1.68.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0"}, + {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e"}, + {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43"}, + {file = "grpcio-1.68.1-cp39-cp39-win32.whl", hash = "sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76"}, + {file = "grpcio-1.68.1-cp39-cp39-win_amd64.whl", hash = "sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1"}, + {file = "grpcio-1.68.1.tar.gz", hash = "sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.66.2)"] +protobuf = ["grpcio-tools (>=1.68.1)"] [[package]] name = "grpcio-status" @@ -1486,13 +1550,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.6" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, - {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -1557,13 +1621,13 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.25.1" +version = "0.26.5" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.25.1-py3-none-any.whl", hash = "sha256:a5158ded931b3188f54ea9028097312cb0acd50bffaaa2612014c3c526b44972"}, - {file = "huggingface_hub-0.25.1.tar.gz", hash = "sha256:9ff7cb327343211fbd06e2b149b8f362fd1e389454f3f14c6db75a4999ee20ff"}, + {file = "huggingface_hub-0.26.5-py3-none-any.whl", hash = "sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924"}, + {file = "huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b"}, ] [package.dependencies] @@ -1576,28 +1640,28 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", 
"jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"] +inference = ["aiohttp"] +quality = ["libcst (==1.4.0)", "mypy (==1.5.1)", "ruff (>=0.5.0)"] tensorflow = ["graphviz", "pydot", "tensorflow"] tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["safetensors[torch]", "torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] name = "identify" -version = "2.6.1" +version = "2.6.3" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, - {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, + {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, + {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, ] [package.extras] @@ -1692,72 +1756,87 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.5.0" +version = "0.8.2" description = "Fast iterable JSON parser." 
optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, - {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, - {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, - {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, - {file = "jiter-0.5.0-cp311-none-win32.whl", hash = 
"sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, - {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, - {file = "jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a"}, - {file = "jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a"}, - {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e"}, - {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338"}, - {file = "jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4"}, - {file = "jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5"}, - {file = "jiter-0.5.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f04bc2fc50dc77be9d10f73fcc4e39346402ffe21726ff41028f36e179b587e6"}, - {file = "jiter-0.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f433a4169ad22fcb550b11179bb2b4fd405de9b982601914ef448390b2954f3"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad4a6398c85d3a20067e6c69890ca01f68659da94d74c800298581724e426c7e"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6baa88334e7af3f4d7a5c66c3a63808e5efbc3698a1c57626541ddd22f8e4fbf"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ece0a115c05efca597c6d938f88c9357c843f8c245dbbb53361a1c01afd7148"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:335942557162ad372cc367ffaf93217117401bf930483b4b3ebdb1223dbddfa7"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649b0ee97a6e6da174bffcb3c8c051a5935d7d4f2f52ea1583b5b3e7822fbf14"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4be354c5de82157886ca7f5925dbda369b77344b4b4adf2723079715f823989"}, - {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5206144578831a6de278a38896864ded4ed96af66e1e63ec5dd7f4a1fce38a3a"}, - {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:8120c60f8121ac3d6f072b97ef0e71770cc72b3c23084c72c4189428b1b1d3b6"}, - {file = "jiter-0.5.0-cp38-none-win32.whl", hash = "sha256:6f1223f88b6d76b519cb033a4d3687ca157c272ec5d6015c322fc5b3074d8a5e"}, - {file = "jiter-0.5.0-cp38-none-win_amd64.whl", hash = "sha256:c59614b225d9f434ea8fc0d0bec51ef5fa8c83679afedc0433905994fb36d631"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, - {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, - {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, - {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, + {file 
= "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, + {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, + {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, + {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, + {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, + {file = 
"jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, + {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, + {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, + {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, + {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, + {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, + {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, + {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, + {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, + {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, + {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, + {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, + {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, ] [[package]] @@ -1908,6 +1987,39 @@ files = [ {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, ] +[[package]] +name = "lancedb" +version = "0.5.7" +description = "lancedb" +optional = false +python-versions = ">=3.8" +files = [ + {file = "lancedb-0.5.7-py3-none-any.whl", hash = 
"sha256:6169966f715ef530be545950e1aaf9f3f160967e4ba7456cd67c9f30f678095d"}, + {file = "lancedb-0.5.7.tar.gz", hash = "sha256:878914b493f91d09a77b14f1528104741f273234cbdd6671be705f447701fd51"}, +] + +[package.dependencies] +attrs = ">=21.3.0" +cachetools = "*" +click = ">=8.1.7" +deprecation = "*" +overrides = ">=0.7" +pydantic = ">=1.10" +pylance = "0.9.18" +pyyaml = ">=6.0" +ratelimiter = ">=1.0,<2.0" +requests = ">=2.31.0" +retry = ">=0.9.2" +semver = ">=3.0" +tqdm = ">=4.27.0" + +[package.extras] +clip = ["open-clip", "pillow", "torch"] +dev = ["pre-commit", "ruff"] +docs = ["mkdocs", "mkdocs-jupyter", "mkdocs-material", "mkdocs-ultralytics-plugin (==0.0.44)", "mkdocstrings[python]"] +embeddings = ["InstructorEmbedding", "awscli (>=1.29.57)", "boto3 (>=1.28.57)", "botocore (>=1.31.57)", "cohere", "google.generativeai", "huggingface-hub", "open-clip-torch", "openai (>=1.6.1)", "pillow", "sentence-transformers", "torch"] +tests = ["aiohttp", "duckdb", "pandas (>=1.4)", "polars (>=0.19)", "pytest", "pytest-asyncio", "pytest-mock", "pytz"] + [[package]] name = "markdown" version = "3.7" @@ -1928,120 +2040,122 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "matplotlib" -version = "3.9.2" +version = "3.9.3" description = "Python plotting package" optional = false python-versions = ">=3.9" files = [ - {file = "matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"}, - {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"}, - {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"}, - {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"}, - {file = "matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", 
hash = "sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"}, - {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"}, - {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"}, - {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"}, - {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"}, - {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"}, - {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"}, - {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"}, - {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"}, - {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"}, - {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"}, - {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"}, - {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"}, - {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"}, - {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"}, - {file = "matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"}, - {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"}, - {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"}, - {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"}, - {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"}, - {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"}, - {file = "matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"}, - {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"}, - {file = 
"matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"}, - {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"}, - {file = "matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"}, - {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"}, - {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"}, - {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"}, - {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"}, - {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"}, - {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"}, + {file = "matplotlib-3.9.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:41b016e3be4e740b66c79a031a0a6e145728dbc248142e751e8dab4f3188ca1d"}, + {file = "matplotlib-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e0143975fc2a6d7136c97e19c637321288371e8f09cff2564ecd73e865ea0b9"}, + {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f459c8ee2c086455744723628264e43c884be0c7d7b45d84b8cd981310b4815"}, + {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:687df7ceff57b8f070d02b4db66f75566370e7ae182a0782b6d3d21b0d6917dc"}, + {file = "matplotlib-3.9.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:edd14cf733fdc4f6e6fe3f705af97676a7e52859bf0044aa2c84e55be739241c"}, + {file = "matplotlib-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:1c40c244221a1adbb1256692b1133c6fb89418df27bf759a31a333e7912a4010"}, + {file = "matplotlib-3.9.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cf2a60daf6cecff6828bc608df00dbc794380e7234d2411c0ec612811f01969d"}, + {file = "matplotlib-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:213d6dc25ce686516208d8a3e91120c6a4fdae4a3e06b8505ced5b716b50cc04"}, + {file = "matplotlib-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c52f48eb75fcc119a4fdb68ba83eb5f71656999420375df7c94cc68e0e14686e"}, + {file = "matplotlib-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3c93796b44fa111049b88a24105e947f03c01966b5c0cc782e2ee3887b790a3"}, + 
{file = "matplotlib-3.9.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cd1077b9a09b16d8c3c7075a8add5ffbfe6a69156a57e290c800ed4d435bef1d"}, + {file = "matplotlib-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:c96eeeb8c68b662c7747f91a385688d4b449687d29b691eff7068a4602fe6dc4"}, + {file = "matplotlib-3.9.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a361bd5583bf0bcc08841df3c10269617ee2a36b99ac39d455a767da908bbbc"}, + {file = "matplotlib-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e14485bb1b83eeb3d55b6878f9560240981e7bbc7a8d4e1e8c38b9bd6ec8d2de"}, + {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d279f78844aad213c4935c18f8292a9432d51af2d88bca99072c903948045"}, + {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6c12514329ac0d03128cf1dcceb335f4fbf7c11da98bca68dca8dcb983153a9"}, + {file = "matplotlib-3.9.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6e9de2b390d253a508dd497e9b5579f3a851f208763ed67fdca5dc0c3ea6849c"}, + {file = "matplotlib-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d796272408f8567ff7eaa00eb2856b3a00524490e47ad505b0b4ca6bb8a7411f"}, + {file = "matplotlib-3.9.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:203d18df84f5288973b2d56de63d4678cc748250026ca9e1ad8f8a0fd8a75d83"}, + {file = "matplotlib-3.9.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b651b0d3642991259109dc0351fc33ad44c624801367bb8307be9bfc35e427ad"}, + {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66d7b171fecf96940ce069923a08ba3df33ef542de82c2ff4fe8caa8346fa95a"}, + {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be0ba61f6ff2e6b68e4270fb63b6813c9e7dec3d15fc3a93f47480444fd72f0"}, + {file = "matplotlib-3.9.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d6b2e8856dec3a6db1ae51aec85c82223e834b228c1d3228aede87eee2b34f9"}, + {file = "matplotlib-3.9.3-cp313-cp313-win_amd64.whl", hash = "sha256:90a85a004fefed9e583597478420bf904bb1a065b0b0ee5b9d8d31b04b0f3f70"}, + {file = "matplotlib-3.9.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3119b2f16de7f7b9212ba76d8fe6a0e9f90b27a1e04683cd89833a991682f639"}, + {file = "matplotlib-3.9.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:87ad73763d93add1b6c1f9fcd33af662fd62ed70e620c52fcb79f3ac427cf3a6"}, + {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:026bdf3137ab6022c866efa4813b6bbeddc2ed4c9e7e02f0e323a7bca380dfa0"}, + {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760a5e89ebbb172989e8273024a1024b0f084510b9105261b3b00c15e9c9f006"}, + {file = "matplotlib-3.9.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a42b9dc42de2cfe357efa27d9c50c7833fc5ab9b2eb7252ccd5d5f836a84e1e4"}, + {file = "matplotlib-3.9.3-cp313-cp313t-win_amd64.whl", hash = "sha256:e0fcb7da73fbf67b5f4bdaa57d85bb585a4e913d4a10f3e15b32baea56a67f0a"}, + {file = "matplotlib-3.9.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:031b7f5b8e595cc07def77ec5b58464e9bb67dc5760be5d6f26d9da24892481d"}, + {file = "matplotlib-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fa6e193c14d6944e0685cdb527cb6b38b0e4a518043e7212f214113af7391da"}, + {file = "matplotlib-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6eefae6effa0c35bbbc18c25ee6e0b1da44d2359c3cd526eb0c9e703cf055d"}, + 
{file = "matplotlib-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d3e5c7a99bd28afb957e1ae661323b0800d75b419f24d041ed1cc5d844a764"}, + {file = "matplotlib-3.9.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:816a966d5d376bf24c92af8f379e78e67278833e4c7cbc9fa41872eec629a060"}, + {file = "matplotlib-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fb0b37c896172899a4a93d9442ffdc6f870165f59e05ce2e07c6fded1c15749"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f2a4ea08e6876206d511365b0bc234edc813d90b930be72c3011bbd7898796f"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b081dac96ab19c54fd8558fac17c9d2c9cb5cc4656e7ed3261ddc927ba3e2c5"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a0a63cb8404d1d1f94968ef35738900038137dab8af836b6c21bb6f03d75465"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:896774766fd6be4571a43bc2fcbcb1dcca0807e53cab4a5bf88c4aa861a08e12"}, + {file = "matplotlib-3.9.3.tar.gz", hash = "sha256:cd5dbbc8e25cad5f706845c4d100e2c8b34691b412b93717ce38d8ae803bcfa5"}, ] [package.dependencies] @@ -2057,7 +2171,7 @@ pyparsing = ">=2.3.1" python-dateutil = ">=2.7" [package.extras] -dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] +dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] [[package]] name = "mpmath" @@ -2078,13 +2192,13 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msal" -version = "1.31.0" +version = "1.31.1" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
optional = false python-versions = ">=3.7" files = [ - {file = "msal-1.31.0-py3-none-any.whl", hash = "sha256:96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7"}, - {file = "msal-1.31.0.tar.gz", hash = "sha256:2c4f189cf9cc8f00c80045f66d39b7c0f3ed45873fd3d1f2af9f22db2e12ff4b"}, + {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, + {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, ] [package.dependencies] @@ -2240,38 +2354,43 @@ dill = ">=0.3.8" [[package]] name = "mypy" -version = "1.11.2" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, - {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, - {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, - {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, - {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, - {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, - {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, - {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, - {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, - {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, - {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, - {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, - {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, - {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, - {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, - {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, - {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, - {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, - {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] @@ -2281,6 +2400,7 @@ typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -2552,13 +2672,13 @@ httpx = ">=0.27.0,<0.28.0" [[package]] name = "openai" -version = "1.51.0" +version = "1.57.1" description = "The official Python library for the openai API" optional = false -python-versions = ">=3.7.1" +python-versions = ">=3.8" files = [ - {file = "openai-1.51.0-py3-none-any.whl", hash = "sha256:d9affafb7e51e5a27dce78589d4964ce4d6f6d560307265933a94b2e3f3c5d2c"}, - {file = "openai-1.51.0.tar.gz", hash = "sha256:8dc4f9d75ccdd5466fc8c99a952186eddceb9fd6ba694044773f3736a847149d"}, + {file = "openai-1.57.1-py3-none-any.whl", hash = "sha256:3865686c927e93492d1145938d4a24b634951531c4b2769d43ca5dbd4b25d8fd"}, + {file = "openai-1.57.1.tar.gz", hash = "sha256:a95f22e04ab3df26e64a15d958342265e802314131275908b3b3e36f8c5d4377"}, ] [package.dependencies] @@ -2574,15 +2694,26 @@ typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs 
(>=1.1.0.11)"] +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -2687,13 +2818,13 @@ dev = ["jinja2"] [[package]] name = "pgvector" -version = "0.3.5" +version = "0.3.6" description = "pgvector support for Python" optional = true python-versions = ">=3.8" files = [ - {file = "pgvector-0.3.5-py3-none-any.whl", hash = "sha256:56cca90392e596ea18873c593ec858a1984a77d16d1f82b8d0c180e79ef1018f"}, - {file = "pgvector-0.3.5.tar.gz", hash = "sha256:e876c9ee382c4c2f7ee57691a4c4015d688c7222e47448ce310ded03ecfafe2f"}, + {file = "pgvector-0.3.6-py3-none-any.whl", hash = "sha256:f6c269b3c110ccb7496bac87202148ed18f34b390a0189c783e351062400a75a"}, + {file = "pgvector-0.3.6.tar.gz", hash = "sha256:31d01690e6ea26cea8a633cde5f0f55f5b246d9c8292d68efdef8c22ec994ade"}, ] [package.dependencies] @@ -2701,95 +2832,90 @@ numpy = "*" [[package]] name = "pillow" -version = "10.4.0" +version = "11.0.0" description = "Python Imaging Library (Fork)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = 
"pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = 
"sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, + {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, + {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, + {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, + {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, + {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, + {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, + {file = 
"pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, + {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, + {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, + {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, + {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, + {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, + {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = 
"sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, + {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, + {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, + {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, + {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, + {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, + {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, + {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, + {file = 
"pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, + {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -2864,15 +2990,106 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "propcache" +version = "0.2.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +files = [ + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"}, + {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash = 
"sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"}, + {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"}, + {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"}, + {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d"}, + {file = 
"propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518"}, + {file = "propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246"}, + {file = "propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30"}, + {file = "propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6"}, + {file = "propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"}, + {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"}, + {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"}, + {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"}, + {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"}, +] + [[package]] name = "proto-plus" -version = "1.24.0" +version = "1.25.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false python-versions = ">=3.7" files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, + {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"}, + {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"}, ] [package.dependencies] @@ -2901,54 +3118,68 @@ files = [ {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + [[package]] name = "pyarrow" -version = "17.0.0" +version = "18.1.0" description = "Python library for Apache Arrow" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, - {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, - {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, - {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, - 
{file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, - {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, - {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, - {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, - {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, - {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = 
"sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56"}, + {file = "pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0"}, + {file = "pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30"}, + {file = "pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c"}, + {file = "pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:0b331e477e40f07238adc7ba7469c36b908f07c89b95dd4bd3a0ec84a3d1e21e"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c4dd0c9010a25ba03e198fe743b1cc03cd33c08190afff371749c52ccbbaf76"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f97b31b4c4e21ff58c6f330235ff893cc81e23da081b1a4b1c982075e0ed4e9"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a4813cb8ecf1809871fd2d64a8eff740a1bd3691bbe55f01a3cf6c5ec869754"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:05a5636ec3eb5cc2a36c6edb534a38ef57b2ab127292a716d00eabb887835f1e"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73eeed32e724ea3568bb06161cad5fa7751e45bc2228e33dcb10c614044165c7"}, + {file = "pyarrow-18.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a1880dd6772b685e803011a6b43a230c23b566859a6e0c9a276c1e0faf4f4052"}, + {file = "pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73"}, ] -[package.dependencies] -numpy = ">=1.16.6" - [package.extras] test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] @@ -2990,22 +3221,19 @@ files = [ [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.4" -typing-extensions = [ 
- {version = ">=4.6.1", markers = "python_version < \"3.13\""}, - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, -] +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -3013,100 +3241,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = 
"pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = 
"pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = 
"pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = 
"pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = 
"pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] @@ -3114,13 +3353,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pyjwt" -version = "2.9.0" +version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] 
[package.dependencies] @@ -3132,15 +3371,39 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pylance" +version = "0.9.18" +description = "python wrapper for Lance columnar format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pylance-0.9.18-cp38-abi3-macosx_10_15_x86_64.whl", hash = "sha256:fe2445d922c594d90e89111385106f6b152caab27996217db7bb4b8947eb0bea"}, + {file = "pylance-0.9.18-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:a2c424c50f5186edbbcc5a26f34063ed09d9a7390e28033395728ce02b5658f0"}, + {file = "pylance-0.9.18-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10af06edfde3e8451bf2251381d3980a0a164eab9d4c3d4dc8b6318969e958a6"}, + {file = "pylance-0.9.18-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d8bb9045d7163cc966b9fe34a917044192be37a90915475b77461e5b7d89e442"}, + {file = "pylance-0.9.18-cp38-abi3-win_amd64.whl", hash = "sha256:5ea80b7bf70d992f3fe63bce2d2f064f742124c04eaedeb76baca408ded85a2c"}, +] + +[package.dependencies] +numpy = ">=1.22" +pyarrow = ">=12" + +[package.extras] +benchmarks = ["pytest-benchmark"] +dev = ["ruff (==0.2.2)"] +tests = ["datasets", "duckdb", "ml_dtypes", "pandas", "pillow", "polars[pandas,pyarrow]", "pytest", "tensorflow", "tqdm"] +torch = ["torch"] + [[package]] name = "pyparsing" -version = "3.1.4" +version = "3.2.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false -python-versions = ">=3.6.8" +python-versions = ">=3.9" files = [ - {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, - {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, + {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, + {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, ] [package.extras] @@ -3148,13 +3411,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "8.3.3" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, - {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] @@ -3226,29 +3489,29 @@ files = [ [[package]] name = "pywin32" -version = "307" +version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-307-cp310-cp310-win32.whl", hash = "sha256:f8f25d893c1e1ce2d685ef6d0a481e87c6f510d0f3f117932781f412e0eba31b"}, - {file = "pywin32-307-cp310-cp310-win_amd64.whl", hash = "sha256:36e650c5e5e6b29b5d317385b02d20803ddbac5d1031e1f88d20d76676dd103d"}, - {file = "pywin32-307-cp310-cp310-win_arm64.whl", hash = "sha256:0c12d61e0274e0c62acee79e3e503c312426ddd0e8d4899c626cddc1cafe0ff4"}, - {file 
= "pywin32-307-cp311-cp311-win32.whl", hash = "sha256:fec5d27cc893178fab299de911b8e4d12c5954e1baf83e8a664311e56a272b75"}, - {file = "pywin32-307-cp311-cp311-win_amd64.whl", hash = "sha256:987a86971753ed7fdd52a7fb5747aba955b2c7fbbc3d8b76ec850358c1cc28c3"}, - {file = "pywin32-307-cp311-cp311-win_arm64.whl", hash = "sha256:fd436897c186a2e693cd0437386ed79f989f4d13d6f353f8787ecbb0ae719398"}, - {file = "pywin32-307-cp312-cp312-win32.whl", hash = "sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815"}, - {file = "pywin32-307-cp312-cp312-win_amd64.whl", hash = "sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347"}, - {file = "pywin32-307-cp312-cp312-win_arm64.whl", hash = "sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2"}, - {file = "pywin32-307-cp313-cp313-win32.whl", hash = "sha256:ea4d56e48dc1ab2aa0a5e3c0741ad6e926529510516db7a3b6981a1ae74405e5"}, - {file = "pywin32-307-cp313-cp313-win_amd64.whl", hash = "sha256:576d09813eaf4c8168d0bfd66fb7cb3b15a61041cf41598c2db4a4583bf832d2"}, - {file = "pywin32-307-cp313-cp313-win_arm64.whl", hash = "sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a"}, - {file = "pywin32-307-cp37-cp37m-win32.whl", hash = "sha256:5101472f5180c647d4525a0ed289ec723a26231550dbfd369ec19d5faf60e511"}, - {file = "pywin32-307-cp37-cp37m-win_amd64.whl", hash = "sha256:05de55a7c110478dc4b202230e98af5e0720855360d2b31a44bb4e296d795fba"}, - {file = "pywin32-307-cp38-cp38-win32.whl", hash = "sha256:13d059fb7f10792542082f5731d5d3d9645320fc38814759313e5ee97c3fac01"}, - {file = "pywin32-307-cp38-cp38-win_amd64.whl", hash = "sha256:7e0b2f93769d450a98ac7a31a087e07b126b6d571e8b4386a5762eb85325270b"}, - {file = "pywin32-307-cp39-cp39-win32.whl", hash = "sha256:55ee87f2f8c294e72ad9d4261ca423022310a6e79fb314a8ca76ab3f493854c6"}, - {file = "pywin32-307-cp39-cp39-win_amd64.whl", hash = "sha256:e9d5202922e74985b037c9ef46778335c102b74b95cec70f629453dbe7235d87"}, + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = 
"sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] [[package]] @@ -3313,107 +3576,121 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "ratelimiter" +version = "1.2.0.post0" +description = "Simple python rate limiting object" +optional = false +python-versions = "*" +files = [ + {file = "ratelimiter-1.2.0.post0-py3-none-any.whl", hash = "sha256:a52be07bc0bb0b3674b4b304550f10c769bbb00fead3072e035904474259809f"}, + {file = "ratelimiter-1.2.0.post0.tar.gz", hash = "sha256:5c395dcabdbbde2e5178ef3f89b568a3066454a6ddc223b76473dac22f89b4f7"}, +] + +[package.extras] +test = ["pytest (>=3.0)", "pytest-asyncio"] + [[package]] name = "regex" -version = "2024.9.11" +version = "2024.11.6" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, - {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, - {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, - {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, - {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, - {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, - {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, - {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, - {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, - {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, - {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, - {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, - {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, - {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = 
"regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = 
"regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, ] [[package]] @@ -3437,6 +3714,21 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." 
+optional = false +python-versions = "*" +files = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "rsa" version = "4.9" @@ -3453,13 +3745,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.10.2" +version = "0.10.4" description = "An Amazon S3 Transfer Manager" optional = true python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, + {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, + {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, ] [package.dependencies] @@ -3468,35 +3760,46 @@ botocore = ">=1.33.2,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] +[[package]] +name = "semver" +version = "3.0.2" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, + {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, +] + [[package]] name = "setuptools" -version = "75.1.0" +version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = 
["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -3512,60 +3815,68 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.35" +version = "2.0.36" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, - 
{file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, - {file = 
"SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, - {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, - {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = 
"sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = 
"sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = 
"sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [package.dependencies] @@ -3578,7 +3889,7 @@ aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -3718,111 +4029,26 @@ blobfile = ["blobfile (>=2)"] [[package]] name = "tokenizers" -version = "0.20.0" +version = "0.21.0" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6cff5c5e37c41bc5faa519d6f3df0679e4b37da54ea1f42121719c5e2b4905c0"}, - {file = "tokenizers-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:62a56bf75c27443432456f4ca5ca055befa95e25be8a28141cc495cac8ae4d6d"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc7de6a63f09c4a86909c2597b995aa66e19df852a23aea894929c74369929"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:053c37ecee482cc958fdee53af3c6534286a86f5d35aac476f7c246830e53ae5"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d7074aaabc151a6363fa03db5493fc95b423b2a1874456783989e96d541c7b6"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a11435780f2acd89e8fefe5e81cecf01776f6edb9b3ac95bcb76baee76b30b90"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a81cd2712973b007d84268d45fc3f6f90a79c31dfe7f1925e6732f8d2959987"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7dfd796ab9d909f76fb93080e1c7c8309f196ecb316eb130718cd5e34231c69"}, - {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8029ad2aa8cb00605c9374566034c1cc1b15130713e0eb5afcef6cface8255c9"}, - {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca4d54260ebe97d59dfa9a30baa20d0c4dd9137d99a8801700055c561145c24e"}, - {file = "tokenizers-0.20.0-cp310-none-win32.whl", hash = "sha256:95ee16b57cec11b86a7940174ec5197d506439b0f415ab3859f254b1dffe9df0"}, - {file = "tokenizers-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:0a61a11e93eeadbf02aea082ffc75241c4198e0608bbbac4f65a9026851dcf37"}, - {file = "tokenizers-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6636b798b3c4d6c9b1af1a918bd07c867808e5a21c64324e95318a237e6366c3"}, - {file = "tokenizers-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ec603e42eaf499ffd58b9258162add948717cf21372458132f14e13a6bc7172"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce124264903a8ea6f8f48e1cc7669e5ef638c18bd4ab0a88769d5f92debdf7f"}, - {file = 
"tokenizers-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07bbeba0231cf8de07aa6b9e33e9779ff103d47042eeeb859a8c432e3292fb98"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06c0ca8397b35d38b83a44a9c6929790c1692957d88541df061cb34d82ebbf08"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca6557ac3b83d912dfbb1f70ab56bd4b0594043916688e906ede09f42e192401"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a5ad94c9e80ac6098328bee2e3264dbced4c6faa34429994d473f795ec58ef4"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b5c7f906ee6bec30a9dc20268a8b80f3b9584de1c9f051671cb057dc6ce28f6"}, - {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:31e087e9ee1b8f075b002bfee257e858dc695f955b43903e1bb4aa9f170e37fe"}, - {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c3124fb6f3346cb3d8d775375d3b429bf4dcfc24f739822702009d20a4297990"}, - {file = "tokenizers-0.20.0-cp311-none-win32.whl", hash = "sha256:a4bb8b40ba9eefa621fdcabf04a74aa6038ae3be0c614c6458bd91a4697a452f"}, - {file = "tokenizers-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:2b709d371f1fe60a28ef0c5c67815952d455ca7f34dbe7197eaaed3cc54b658e"}, - {file = "tokenizers-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:15c81a17d0d66f4987c6ca16f4bea7ec253b8c7ed1bb00fdc5d038b1bb56e714"}, - {file = "tokenizers-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a531cdf1fb6dc41c984c785a3b299cb0586de0b35683842a3afbb1e5207f910"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06caabeb4587f8404e0cd9d40f458e9cba3e815c8155a38e579a74ff3e2a4301"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8768f964f23f5b9f50546c0369c75ab3262de926983888bbe8b98be05392a79c"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:626403860152c816f97b649fd279bd622c3d417678c93b4b1a8909b6380b69a8"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c1b88fa9e5ff062326f4bf82681da5a96fca7104d921a6bd7b1e6fcf224af26"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7e559436a07dc547f22ce1101f26d8b2fad387e28ec8e7e1e3b11695d681d8"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48afb75e50449848964e4a67b0da01261dd3aa8df8daecf10db8fd7f5b076eb"}, - {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:baf5d0e1ff44710a95eefc196dd87666ffc609fd447c5e5b68272a7c3d342a1d"}, - {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e5e56df0e8ed23ba60ae3848c3f069a0710c4b197218fe4f89e27eba38510768"}, - {file = "tokenizers-0.20.0-cp312-none-win32.whl", hash = "sha256:ec53e5ecc142a82432f9c6c677dbbe5a2bfee92b8abf409a9ecb0d425ee0ce75"}, - {file = "tokenizers-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:f18661ece72e39c0dfaa174d6223248a15b457dbd4b0fc07809b8e6d3ca1a234"}, - {file = "tokenizers-0.20.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:f7065b1084d8d1a03dc89d9aad69bcbc8415d4bc123c367063eb32958cd85054"}, - {file = "tokenizers-0.20.0-cp37-cp37m-macosx_11_0_arm64.whl", 
hash = "sha256:e5d4069e4714e3f7ba0a4d3d44f9d84a432cd4e4aa85c3d7dd1f51440f12e4a1"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799b808529e54b7e1a36350bda2aeb470e8390e484d3e98c10395cee61d4e3c6"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f9baa027cc8a281ad5f7725a93c204d7a46986f88edbe8ef7357f40a23fb9c7"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:010ec7f3f7a96adc4c2a34a3ada41fa14b4b936b5628b4ff7b33791258646c6b"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98d88f06155335b14fd78e32ee28ca5b2eb30fced4614e06eb14ae5f7fba24ed"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e13eb000ef540c2280758d1b9cfa5fe424b0424ae4458f440e6340a4f18b2638"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab3cf066ff426f7e6d70435dc28a9ff01b2747be83810e397cba106f39430b0"}, - {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:39fa3761b30a89368f322e5daf4130dce8495b79ad831f370449cdacfb0c0d37"}, - {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c8da0fba4d179ddf2607821575998df3c294aa59aa8df5a6646dc64bc7352bce"}, - {file = "tokenizers-0.20.0-cp37-none-win32.whl", hash = "sha256:fada996d6da8cf213f6e3c91c12297ad4f6cdf7a85c2fadcd05ec32fa6846fcd"}, - {file = "tokenizers-0.20.0-cp37-none-win_amd64.whl", hash = "sha256:7d29aad702279e0760c265fcae832e89349078e3418dd329732d4503259fd6bd"}, - {file = "tokenizers-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:099c68207f3ef0227ecb6f80ab98ea74de559f7b124adc7b17778af0250ee90a"}, - {file = "tokenizers-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:68012d8a8cddb2eab3880870d7e2086cb359c7f7a2b03f5795044f5abff4e850"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9253bdd209c6aee168deca7d0e780581bf303e0058f268f9bb06859379de19b6"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f868600ddbcb0545905ed075eb7218a0756bf6c09dae7528ea2f8436ebd2c93"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9643d9c8c5f99b6aba43fd10034f77cc6c22c31f496d2f0ee183047d948fa0"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c375c6a889aeab44734028bc65cc070acf93ccb0f9368be42b67a98e1063d3f6"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e359f852328e254f070bbd09a19a568421d23388f04aad9f2fb7da7704c7228d"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d98b01a309d4387f3b1c1dd68a8b8136af50376cf146c1b7e8d8ead217a5be4b"}, - {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:459f7537119554c2899067dec1ac74a00d02beef6558f4ee2e99513bf6d568af"}, - {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:392b87ec89452628c045c9f2a88bc2a827f4c79e7d84bc3b72752b74c2581f70"}, - {file = "tokenizers-0.20.0-cp38-none-win32.whl", hash = "sha256:55a393f893d2ed4dd95a1553c2e42d4d4086878266f437b03590d3f81984c4fe"}, - {file = "tokenizers-0.20.0-cp38-none-win_amd64.whl", hash = 
"sha256:30ffe33c5c2f2aab8e9a3340d0110dd9f7ace7eec7362e20a697802306bd8068"}, - {file = "tokenizers-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aa2d4a6fed2a7e3f860c7fc9d48764bb30f2649d83915d66150d6340e06742b8"}, - {file = "tokenizers-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5ef0f814084a897e9071fc4a868595f018c5c92889197bdc4bf19018769b148"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1e1b791e8c3bf4c4f265f180dadaff1c957bf27129e16fdd5e5d43c2d3762c"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b69e55e481459c07885263743a0d3c18d52db19bae8226a19bcca4aaa213fff"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806b4d82e27a2512bc23057b2986bc8b85824914286975b84d8105ff40d03d9"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9859e9ef13adf5a473ccab39d31bff9c550606ae3c784bf772b40f615742a24f"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef703efedf4c20488a8eb17637b55973745b27997ff87bad88ed499b397d1144"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eec0061bab94b1841ab87d10831fdf1b48ebaed60e6d66d66dbe1d873f92bf5"}, - {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:980f3d0d7e73f845b69087f29a63c11c7eb924c4ad6b358da60f3db4cf24bdb4"}, - {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c157550a2f3851b29d7fdc9dc059fcf81ff0c0fc49a1e5173a89d533ed043fa"}, - {file = "tokenizers-0.20.0-cp39-none-win32.whl", hash = "sha256:8a3d2f4d08608ec4f9895ec25b4b36a97f05812543190a5f2c3cd19e8f041e5a"}, - {file = "tokenizers-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:d90188d12afd0c75e537f9a1d92f9c7375650188ee4f48fdc76f9e38afbd2251"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d68e15f1815357b059ec266062340c343ea7f98f7f330602df81ffa3474b6122"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:23f9ecec637b9bc80da5f703808d29ed5329e56b5aa8d791d1088014f48afadc"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f830b318ee599e3d0665b3e325f85bc75ee2d2ca6285f52e439dc22b64691580"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3dc750def789cb1de1b5a37657919545e1d9ffa667658b3fa9cb7862407a1b8"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e26e6c755ae884c2ea6135cd215bdd0fccafe4ee62405014b8c3cd19954e3ab9"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a1158c7174f427182e08baa2a8ded2940f2b4a3e94969a85cc9cfd16004cbcea"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6324826287a3fc198898d3dcf758fe4a8479e42d6039f4c59e2cedd3cf92f64e"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d8653149405bb0c16feaf9cfee327fdb6aaef9dc2998349fec686f35e81c4e2"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a2dc1e402a155e97309287ca085c80eb1b7fab8ae91527d3b729181639fa51"}, - {file = 
"tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bef67b20aa6e5f7868c42c7c5eae4d24f856274a464ae62e47a0f2cccec3da"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da06e397182ff53789c506c7833220c192952c57e1581a53f503d8d953e2d67e"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:302f7e11a14814028b7fc88c45a41f1bbe9b5b35fd76d6869558d1d1809baa43"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:055ec46e807b875589dfbe3d9259f9a6ee43394fb553b03b3d1e9541662dbf25"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e3144b8acebfa6ae062e8f45f7ed52e4b50fb6c62f93afc8871b525ab9fdcab3"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b52aa3fd14b2a07588c00a19f66511cff5cca8f7266ca3edcdd17f3512ad159f"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b8cf52779ffc5d4d63a0170fbeb512372bad0dd014ce92bbb9149756c831124"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:983a45dd11a876124378dae71d6d9761822199b68a4c73f32873d8cdaf326a5b"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6b819c9a19831ebec581e71a7686a54ab45d90faf3842269a10c11d746de0c"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e738cfd80795fcafcef89c5731c84b05638a4ab3f412f97d5ed7765466576eb1"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8842c7be2fadb9c9edcee233b1b7fe7ade406c99b0973f07439985c1c1d0683"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e47a82355511c373a4a430c4909dc1e518e00031207b1fec536c49127388886b"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9afbf359004551179a5db19424180c81276682773cff2c5d002f6eaaffe17230"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07eaa8799a92e6af6f472c21a75bf71575de2af3c0284120b7a09297c0de2f3"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0994b2e5fc53a301071806bc4303e4bc3bdc3f490e92a21338146a36746b0872"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6466e0355b603d10e3cc3d282d350b646341b601e50969464a54939f9848d0"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1e86594c2a433cb1ea09cfbe596454448c566e57ee8905bd557e489d93e89986"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3e14cdef1efa96ecead6ea64a891828432c3ebba128bdc0596e3059fea104ef3"}, - {file = "tokenizers-0.20.0.tar.gz", hash = "sha256:39d7acc43f564c274085cafcd1dae9d36f332456de1a31970296a6b8da4eac8d"}, + {file = "tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2"}, + {file = "tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b177fb54c4702ef611de0c069d9169f0004233890e0c4c5bd5508ae05abf193"}, + 
{file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b43779a269f4629bebb114e19c3fca0223296ae9fea8bb9a7a6c6fb0657ff8e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aeb255802be90acfd363626753fda0064a8df06031012fe7d52fd9a905eb00e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8b09dbeb7a8d73ee204a70f94fc06ea0f17dcf0844f16102b9f414f0b7463ba"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:400832c0904f77ce87c40f1a8a27493071282f785724ae62144324f171377273"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84ca973b3a96894d1707e189c14a774b701596d579ffc7e69debfc036a61a04"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:eb7202d231b273c34ec67767378cd04c767e967fda12d4a9e36208a34e2f137e"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:089d56db6782a73a27fd8abf3ba21779f5b85d4a9f35e3b493c7bbcbbf0d539b"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:c87ca3dc48b9b1222d984b6b7490355a6fdb411a2d810f6f05977258400ddb74"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4145505a973116f91bc3ac45988a92e618a6f83eb458f49ea0790df94ee243ff"}, + {file = "tokenizers-0.21.0-cp39-abi3-win32.whl", hash = "sha256:eb1702c2f27d25d9dd5b389cc1f2f51813e99f8ca30d9e25348db6585a97e24a"}, + {file = "tokenizers-0.21.0-cp39-abi3-win_amd64.whl", hash = "sha256:87841da5a25a3a5f70c102de371db120f41873b854ba65e52bccd57df5a3780c"}, + {file = "tokenizers-0.21.0.tar.gz", hash = "sha256:ee0894bf311b75b0c03079f33859ae4b2334d675d4e93f5a4132e1eae2834fe4"}, ] [package.dependencies] @@ -3835,13 +4061,43 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] [[package]] name = "tomli" -version = "2.0.2" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = 
"sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -3898,20 +4154,21 @@ optree = ["optree (>=0.12.0)"] [[package]] name = "tqdm" -version = "4.66.5" +version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] @@ -3953,7 +4210,7 @@ files = [ name = "types-requests" version = "2.31.0.6" description = "Typing stubs for requests" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, @@ -3965,13 +4222,13 @@ types-urllib3 = "*" [[package]] name = "types-requests" -version = "2.32.0.20240914" +version = "2.32.0.20241016" description = "Typing stubs for requests" -optional = true +optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, - {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] @@ -3979,20 +4236,23 @@ urllib3 = ">=2" [[package]] name = "types-tqdm" -version = "4.66.0.20240417" +version = "4.67.0.20241119" description = "Typing stubs for tqdm" optional = false python-versions = ">=3.8" files = [ - {file = "types-tqdm-4.66.0.20240417.tar.gz", hash = "sha256:16dce9ef522ea8d40e4f5b8d84dd8a1166eefc13ceee7a7e158bf0f1a1421a31"}, - {file = "types_tqdm-4.66.0.20240417-py3-none-any.whl", hash = "sha256:248aef1f9986b7b8c2c12b3cb4399fc17dba0a29e7e3f3f9cd704babb879383d"}, + {file = "types-tqdm-4.67.0.20241119.tar.gz", hash = "sha256:1769e0e94d5e6d8fa814965f9cf3d9928376dd15dabcbcb784bb8769081092b4"}, + {file = "types_tqdm-4.67.0.20241119-py3-none-any.whl", hash = "sha256:a18d4eb62db0d35c52707ae13d821b5a57970755273ecb56e133ccc0ac7e7c79"}, ] +[package.dependencies] +types-requests = "*" + [[package]] name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" -optional = true +optional = false python-versions = "*" files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, @@ -4067,13 +4327,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.6" +version = "20.28.0" 
description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, - {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, + {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, + {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, ] [package.dependencies] @@ -4087,13 +4347,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "werkzeug" -version = "3.0.4" +version = "3.1.3" description = "The comprehensive WSGI web application library." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, - {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, ] [package.dependencies] @@ -4236,118 +4496,109 @@ files = [ [[package]] name = "yarl" -version = "1.13.1" +version = "1.18.3" description = "Yet another URL library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, - {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, - {file = "yarl-1.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec9dd328016d8d25702a24ee274932aebf6be9787ed1c28d021945d264235b3c"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5820bd4178e6a639b3ef1db8b18500a82ceab6d8b89309e121a6859f56585b05"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86c438ce920e089c8c2388c7dcc8ab30dfe13c09b8af3d306bcabb46a053d6f7"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3de86547c820e4f4da4606d1c8ab5765dd633189791f15247706a2eeabc783ae"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca53632007c69ddcdefe1e8cbc3920dd88825e618153795b57e6ebcc92e752a"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4ee1d240b84e2f213565f0ec08caef27a0e657d4c42859809155cf3a29d1735"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c49f3e379177f4477f929097f7ed4b0622a586b0aa40c07ac8c0f8e40659a1ac"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5c5e32fef09ce101fe14acd0f498232b5710effe13abac14cd95de9c274e689e"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab9524e45ee809a083338a749af3b53cc7efec458c3ad084361c1dbf7aaf82a2"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:b1481c048fe787f65e34cb06f7d6824376d5d99f1231eae4778bbe5c3831076d"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:31497aefd68036d8e31bfbacef915826ca2e741dbb97a8d6c7eac66deda3b606"}, - {file = "yarl-1.13.1-cp310-cp310-win32.whl", hash = "sha256:1fa56f34b2236f5192cb5fceba7bbb09620e5337e0b6dfe2ea0ddbd19dd5b154"}, - {file = "yarl-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:1bbb418f46c7f7355084833051701b2301092e4611d9e392360c3ba2e3e69f88"}, - {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:216a6785f296169ed52cd7dcdc2612f82c20f8c9634bf7446327f50398732a51"}, - {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40c6e73c03a6befb85b72da213638b8aaa80fe4136ec8691560cf98b11b8ae6e"}, - {file = "yarl-1.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2430cf996113abe5aee387d39ee19529327205cda975d2b82c0e7e96e5fdabdc"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb4134cc6e005b99fa29dbc86f1ea0a298440ab6b07c6b3ee09232a3b48f495"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309c104ecf67626c033845b860d31594a41343766a46fa58c3309c538a1e22b2"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f90575e9fe3aae2c1e686393a9689c724cd00045275407f71771ae5d690ccf38"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d2e1626be8712333a9f71270366f4a132f476ffbe83b689dd6dc0d114796c74"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b66c87da3c6da8f8e8b648878903ca54589038a0b1e08dde2c86d9cd92d4ac9"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf1ad338620249f8dd6d4b6a91a69d1f265387df3697ad5dc996305cf6c26fb2"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9915300fe5a0aa663c01363db37e4ae8e7c15996ebe2c6cce995e7033ff6457f"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:703b0f584fcf157ef87816a3c0ff868e8c9f3c370009a8b23b56255885528f10"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1d8e3ca29f643dd121f264a7c89f329f0fcb2e4461833f02de6e39fef80f89da"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7055bbade838d68af73aea13f8c86588e4bcc00c2235b4b6d6edb0dbd174e246"}, - {file = "yarl-1.13.1-cp311-cp311-win32.whl", hash = "sha256:a3442c31c11088e462d44a644a454d48110f0588de830921fd201060ff19612a"}, - {file = "yarl-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:81bad32c8f8b5897c909bf3468bf601f1b855d12f53b6af0271963ee67fff0d2"}, - {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f452cc1436151387d3d50533523291d5f77c6bc7913c116eb985304abdbd9ec9"}, - {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9cec42a20eae8bebf81e9ce23fb0d0c729fc54cf00643eb251ce7c0215ad49fe"}, - {file = "yarl-1.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d959fe96e5c2712c1876d69af0507d98f0b0e8d81bee14cfb3f6737470205419"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c837ab90c455f3ea8e68bee143472ee87828bff19ba19776e16ff961425b57"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94a993f976cdcb2dc1b855d8b89b792893220db8862d1a619efa7451817c836b"}, - {file = 
"yarl-1.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2442a415a5f4c55ced0fade7b72123210d579f7d950e0b5527fc598866e62c"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fdbf0418489525231723cdb6c79e7738b3cbacbaed2b750cb033e4ea208f220"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b7f6e699304717fdc265a7e1922561b02a93ceffdaefdc877acaf9b9f3080b8"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bcd5bf4132e6a8d3eb54b8d56885f3d3a38ecd7ecae8426ecf7d9673b270de43"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2a93a4557f7fc74a38ca5a404abb443a242217b91cd0c4840b1ebedaad8919d4"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:22b739f99c7e4787922903f27a892744189482125cc7b95b747f04dd5c83aa9f"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2db874dd1d22d4c2c657807562411ffdfabec38ce4c5ce48b4c654be552759dc"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4feaaa4742517eaceafcbe74595ed335a494c84634d33961214b278126ec1485"}, - {file = "yarl-1.13.1-cp312-cp312-win32.whl", hash = "sha256:bbf9c2a589be7414ac4a534d54e4517d03f1cbb142c0041191b729c2fa23f320"}, - {file = "yarl-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:d07b52c8c450f9366c34aa205754355e933922c79135125541daae6cbf31c799"}, - {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:95c6737f28069153c399d875317f226bbdea939fd48a6349a3b03da6829fb550"}, - {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cd66152561632ed4b2a9192e7f8e5a1d41e28f58120b4761622e0355f0fe034c"}, - {file = "yarl-1.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6a2acde25be0cf9be23a8f6cbd31734536a264723fca860af3ae5e89d771cd71"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18595e6a2ee0826bf7dfdee823b6ab55c9b70e8f80f8b77c37e694288f5de1"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a31d21089894942f7d9a8df166b495101b7258ff11ae0abec58e32daf8088813"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45f209fb4bbfe8630e3d2e2052535ca5b53d4ce2d2026bed4d0637b0416830da"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f722f30366474a99745533cc4015b1781ee54b08de73260b2bbe13316079851"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3bf60444269345d712838bb11cc4eadaf51ff1a364ae39ce87a5ca8ad3bb2c8"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:942c80a832a79c3707cca46bd12ab8aa58fddb34b1626d42b05aa8f0bcefc206"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:44b07e1690f010c3c01d353b5790ec73b2f59b4eae5b0000593199766b3f7a5c"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:396e59b8de7e4d59ff5507fb4322d2329865b909f29a7ed7ca37e63ade7f835c"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3bb83a0f12701c0b91112a11148b5217617982e1e466069d0555be9b372f2734"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c92b89bffc660f1274779cb6fbb290ec1f90d6dfe14492523a0667f10170de26"}, - {file = 
"yarl-1.13.1-cp313-cp313-win32.whl", hash = "sha256:269c201bbc01d2cbba5b86997a1e0f73ba5e2f471cfa6e226bcaa7fd664b598d"}, - {file = "yarl-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:1d0828e17fa701b557c6eaed5edbd9098eb62d8838344486248489ff233998b8"}, - {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8be8cdfe20787e6a5fcbd010f8066227e2bb9058331a4eccddec6c0db2bb85b2"}, - {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08d7148ff11cb8e886d86dadbfd2e466a76d5dd38c7ea8ebd9b0e07946e76e4b"}, - {file = "yarl-1.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4afdf84610ca44dcffe8b6c22c68f309aff96be55f5ea2fa31c0c225d6b83e23"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0d12fe78dcf60efa205e9a63f395b5d343e801cf31e5e1dda0d2c1fb618073d"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298c1eecfd3257aa16c0cb0bdffb54411e3e831351cd69e6b0739be16b1bdaa8"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c14c16831b565707149c742d87a6203eb5597f4329278446d5c0ae7a1a43928e"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9bacedbb99685a75ad033fd4de37129449e69808e50e08034034c0bf063f99"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658e8449b84b92a4373f99305de042b6bd0d19bf2080c093881e0516557474a5"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:373f16f38721c680316a6a00ae21cc178e3a8ef43c0227f88356a24c5193abd6"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:45d23c4668d4925688e2ea251b53f36a498e9ea860913ce43b52d9605d3d8177"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f7917697bcaa3bc3e83db91aa3a0e448bf5cde43c84b7fc1ae2427d2417c0224"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5989a38ba1281e43e4663931a53fbf356f78a0325251fd6af09dd03b1d676a09"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11b3ca8b42a024513adce810385fcabdd682772411d95bbbda3b9ed1a4257644"}, - {file = "yarl-1.13.1-cp38-cp38-win32.whl", hash = "sha256:dcaef817e13eafa547cdfdc5284fe77970b891f731266545aae08d6cce52161e"}, - {file = "yarl-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:7addd26594e588503bdef03908fc207206adac5bd90b6d4bc3e3cf33a829f57d"}, - {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0ae6637b173d0c40b9c1462e12a7a2000a71a3258fa88756a34c7d38926911c"}, - {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:576365c9f7469e1f6124d67b001639b77113cfd05e85ce0310f5f318fd02fe85"}, - {file = "yarl-1.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78f271722423b2d4851cf1f4fa1a1c4833a128d020062721ba35e1a87154a049"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d74f3c335cfe9c21ea78988e67f18eb9822f5d31f88b41aec3a1ec5ecd32da5"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1891d69a6ba16e89473909665cd355d783a8a31bc84720902c5911dbb6373465"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb382fd7b4377363cc9f13ba7c819c3c78ed97c36a82f16f3f92f108c787cbbf"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9c8854b9f80693d20cec797d8e48a848c2fb273eb6f2587b57763ccba3f3bd4b"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf2c3f04ff50f16404ce70f822cdc59760e5e2d7965905f0e700270feb2bbfc"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb9f59f3848edf186a76446eb8bcf4c900fe147cb756fbbd730ef43b2e67c6a7"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef9b85fa1bc91c4db24407e7c4da93a5822a73dd4513d67b454ca7064e8dc6a3"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:098b870c18f1341786f290b4d699504e18f1cd050ed179af8123fd8232513424"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8c723c91c94a3bc8033dd2696a0f53e5d5f8496186013167bddc3fb5d9df46a3"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44a4c40a6f84e4d5955b63462a0e2a988f8982fba245cf885ce3be7618f6aa7d"}, - {file = "yarl-1.13.1-cp39-cp39-win32.whl", hash = "sha256:84bbcdcf393139f0abc9f642bf03f00cac31010f3034faa03224a9ef0bb74323"}, - {file = "yarl-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:fc2931ac9ce9c61c9968989ec831d3a5e6fcaaff9474e7cfa8de80b7aff5a093"}, - {file = "yarl-1.13.1-py3-none-any.whl", hash = "sha256:6a5185ad722ab4dd52d5fb1f30dcc73282eb1ed494906a92d1a228d3f89607b0"}, - {file = "yarl-1.13.1.tar.gz", hash = "sha256:ec8cfe2295f3e5e44c51f57272afbd69414ae629ec7c6b27f5a410efc78b70a0"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = 
"yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, + {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, + {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, + {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, + {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" +propcache = ">=0.2.0" [[package]] name = "zipp" -version = "3.20.2" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" 
+python-versions = ">=3.9"
 files = [
-    {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
-    {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
+    {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
+    {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
 ]

 [package.extras]
@@ -4367,6 +4618,7 @@ datasets = []
 faiss-cpu = ["faiss-cpu"]
 google-generativeai = ["google-generativeai"]
 groq = ["groq"]
+lancedb = ["lancedb"]
 ollama = ["ollama"]
 openai = ["openai"]
 pgvector = ["pgvector"]
@@ -4376,4 +4628,4 @@ torch = ["torch"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9, <4.0"
-content-hash = "0a58b82476e4794adbc5768d38911e1935a9c3941cc40499001199383fc8c8ec"
+content-hash = "a32560a472d4f6230349b9c13273f2462d0bdd560600f051f220a4cb41eeed7f"
diff --git a/adalflow/pyproject.toml b/adalflow/pyproject.toml
index 8b1a68f6..bf6c9c48 100644
--- a/adalflow/pyproject.toml
+++ b/adalflow/pyproject.toml
@@ -57,6 +57,7 @@ anthropic = { version = "^0.31.1", optional = true }
 google-generativeai = { version = "^0.7.2", optional = true }
 cohere = { version = "^5.5.8", optional = true }
 ollama = { version = "^0.2.1", optional = true }
+lancedb = { version = "^0.5.2", optional = true }

 # Azure dependencies
@@ -78,6 +79,9 @@ openai = "^1.12.0"
 groq = "^0.9.0"
 google-generativeai = "^0.7.2"
 anthropic = "^0.31.1"
+lancedb = "^0.5.2"
+
+
 [tool.poetry.group.typing.dependencies]
 mypy = "^1"
@@ -108,6 +112,7 @@ faiss-cpu = ["faiss-cpu"]
 sqlalchemy = ["sqlalchemy"]
 torch = ["torch"]
 ollama = ["ollama"]
+lancedb = ["lancedb"]
 azure = [
     "azure-core",
     "azure-identity",

From c6bfcdf8adba39f17fcc1a0a141ace040c50ab8c Mon Sep 17 00:00:00 2001
From: fm1320
Date: Thu, 12 Dec 2024 00:38:19 +0000
Subject: [PATCH 34/40] fix end of files linting

---
 docs/source/use_cases/classification.rst      |   7 +-
 docs/source/use_cases/rag_opt.rst             |   7 +-
 ...adalflow_classification_optimization.ipynb | 463 ++++++++++++++++
 .../tutorials/adalflow_rag_optimization.ipynb | 495 ++++++++++++++++++
 4 files changed, 964 insertions(+), 8 deletions(-)
 create mode 100644 notebooks/tutorials/adalflow_classification_optimization.ipynb
 create mode 100644 notebooks/tutorials/adalflow_rag_optimization.ipynb

diff --git a/docs/source/use_cases/classification.rst b/docs/source/use_cases/classification.rst
index d0aaa489..0ba09159 100644
--- a/docs/source/use_cases/classification.rst
+++ b/docs/source/use_cases/classification.rst
@@ -1,10 +1,9 @@
-..
-.. Try Quickstart in Colab
-..
-
 .. raw:: html

    <div>
+
+    Open In Colab
+    GitHub
diff --git a/docs/source/use_cases/rag_opt.rst b/docs/source/use_cases/rag_opt.rst
index 73b824bf..072fe3f5 100644
--- a/docs/source/use_cases/rag_opt.rst
+++ b/docs/source/use_cases/rag_opt.rst
@@ -1,10 +1,9 @@
-..
-.. Try Quickstart in Colab
-..
-
 .. raw:: html

    <div>
+ + Open In Colab + GitHub diff --git a/notebooks/tutorials/adalflow_classification_optimization.ipynb b/notebooks/tutorials/adalflow_classification_optimization.ipynb new file mode 100644 index 00000000..0afb97df --- /dev/null +++ b/notebooks/tutorials/adalflow_classification_optimization.ipynb @@ -0,0 +1,463 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The PyTorch library to auto-optimize any LLM task pipelines\n", + "\n", + "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "## 📖 Outline\n", + "\n", + "This is the code for a classification optimization tutorial ![image.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA+gAAAJYCAIAAAB+fFtyAAAAAXNSR0IArs4c6QAAAERlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAD6KADAAQAAAABAAACWAAAAADDsFQWAABAAElEQVR4AeydB5gURRqGe5clLJJUMnqIiCiKYBbBnOMZThEDYMCcc1bMeurpKYZTDHcqYsSM2RMFwawcZhEUliRZ4rJ772xJ0fSEnZ2Znunu+ebhWaqrq6v+eqt75qu//64uqa6udvQRAREQAREQAREQAREQAREINoHSYJsn60RABERABERABERABERABGIEJNx1HoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACOSYwC+//FJSUvLII4/kuN5wVrdzzSdN2+F29dVXp1k4V8WyHK/33nsPs/mbK3tsPf7VbJtQok4E/D4/PRfL9OnT//a3v6299tq0e8cdd/h6Pqy33noDBw6sEw0VFoGCEJBwLwh2NZohAb6+U3wykA6LFi1CJ6V54Kuvvkrr7du3r6qqyrADITnM/EA+88wzHntPP/10CHgy87+J0Dz22GM7d+7cqFGjtm3b7rjjjldddVX+zYhvccKECZxOmBe/K1c5v//++wUXXNC1a1f6vtZaa+21114vv/xynSp/4okn0EB1OiQ/he+5554wTvYY8RRfSihR6KEIbZmGDRtuuOGGV1555ZIl
JKIzMCNDqKBKRin5/vv/++devWsb8uV0QAAQQQQAABBAwU+O6771q1amVgYKaFROEeh4xo1eRt27Y1bNgwISFBl9fvmqrj9ZZt1KhRHKLhkpETIJWRs4xzS6QyzgmI3OVJZeQs49wSqYxzAiJ3+bBUagT5wIEDLVq0SExk/vaxlZkqc2yjiB+ht2bZXytVtVO4R5w6Lg2SyriwR+OipDIaqnFpk1TGhT0aFyWV0VCNS5uhqczIyIhLDDZelF9ubMwaMSOAAAIIIIAAAggEToDCPXApp8MIIIAAAggggAACNgok/fa3v7UxbsdiTkpKuuSSS5KTmblkfWJJpfUpLO0AqSyVsP7/pNL6FJZ2gFSWSlj/f1JZ4xRyc2qN6TgRAQQQQAABBBBAAIHYCTBVJnbWXAkBBBBAAAEEEEAAgRoLULjXmI4TEUAAAQQQQAABBBCInQCFe+ysuRICCCCAAAIIIIAAAjUWoHCvMR0nIoAAAggggAACCCAQOwEK99hZl3ulp556qm3btmlpad27d1+9enW5x7DTcAEtzaQvwfUfZ511luEBE16YwAcffDBgwAB9b5+S+Prrr/uv6vv8pk6d2rx58/T09N69e2/atMl/iQ0zBSpK5fDhw/1PqDYuu+wyM+MnKk9g9uzZ559/vr5f/MQTTxw0aNAXX3zhy+Tm5t5+++3HH398gwYNrr766h07dvgvsWGmQCXZ1Hp6oR/MUaNGmdkFo6KicI9nOhYvXjx+/Php06atXbu2Y8eO/fr127lzZzwD4to1FTjnnHOySh8ffvhhTZvhvPgIZGdn6wOo36LDLv/AAw88/vjj8+bN+9e//lW/fn19QlU0hB3DU6MEKkqlglSxXvoZzXr55ZeNCptgwgTef/99VeerVq1atmxZfn5+3759lVnvmDvvvPMvf/nLK6+8omO2bdt21VVXhZ3LU9MEKsmmQh0xYoT/wdSPXNOCNzEeDSnxiJdAt27d9LPJu3phYaEG/PSLabyC4bo1FtCvXir7anw6J5ojoJ/Rr732mhdPUVFRs2bNHnzwQe/p3r1769atq4LPnGiJpBKB0FTqsJtuumngwIGVHM9Lxgp441kq/hShPoYpKSmq2r1oN2zYoESvXLnS2OAJLEwgNJt66eKLLx47dmzYMTytXIAR97j9NpWXl7dmzRr9/d2LIDExUdv6ARS3gLhwLQQ0iUK/d51yyilDhw7dsmVLLVriVFMEvv766+3bt/uf0IyMDM1n4xNqSnqqH8fy5cs17+LMM8+87bbbfvjhh+o3wBnxEdi3b58ufNxxx+m/+kdTA/D+p1LzEk866SQ+lfFJTI2uGppNr4EXX3zxhBNOOPfccydNmpSTk1OjVoN1El/VGbd87969W6PsTZs29SPQ9saNG/2nbNgioHpuwYIFKgj0977p06dfdNFFn376qWZn2hI/cZYroKpd+8M+od7Oco9np8kCmiejORUnn3zyl19+OXny5P79+6va03c3mhwzsUlAf/gaN25cz549VdjpqT6AqampmZmZPo4+oXwqfQ3DN8KyqWiHDBnSpk0bDXv95z//+c1vfqObGf70pz8Z3ou4h0fhHvcUEID1AioCvD506NBBRbx+DC1ZsuSWW26xvmN0AAFXBK6//nqvK+3bt9fn9NRTT9UA/KWXXupK/5zth2aTahyEG4fcSHDZbI4cOdL/YGoZAH0k9au1Pp5u9DdKvWCqTJRgj92s/jak8Z7QO+K1rTm1xz6TIwwW0FDQGWecsXnzZoNjJLQqCXgfRj6hVcKy6iBNadOPXz6k5idtzJgxb7755t///vdWrVp50epTqVmmmunuB8+/mz6F4RtlsxkWsIa9tIcPZhhL2acU7mVNYrRHf+/r0qXLe++9511Pf0LSdo8ePWJ0eS4THYGDBw9qwEAjB9FpnlZjJ6BpFaoS/E/o/v37tbYMn9DYJSBqV/r+++81x50PadSAI9Cwbs5Tnac7xf/2t7/pk+i3qH80dXOq/6nUzArdU8Sn0vcxc6OibIZFu27dOu3hgxnGUvZpkpagLruXPbERaNSo0ZQpU1q3bq3VKrShd+38+fO1Nm1srs5VIiUwYcIEZVCtff7551qGVnfNawFBrR4YqfZpJ9oC+nVLudNM2WeeeUajPlq1XaN6+uOJ7kKZNWtWu3bt9PSOO+7QjVNPPPFEcjIzDKOdkJq3X24q9bfNu+++Wz9vCwoKdHejprHpx+zDDz9MKmsOHeUzNadC9yy++uqrmv2snOqhJKpk13eeaAnIJ598slOnTnv27PnVr36lf0C1rleUw6H5WglUlE0NcmkRXn0Y9QNW38Cgfz01k23ixIm1ulgQTq580RlejbaA6gDdFK/Rdy0NqTVro3052o+GwODBgzVIoCS2bNlS2/pLXzSuQpvRE9Df4sN+2mv1QF1OfwfTb9S6+02/mGnypYb3ohcDLUdEoNxU6jcuLQTepEkTVX66BUXrRuuXtIhcjkaiJBD2edTT559/3rvWoUOHRo8e3bhx43r16l155ZVaEiBKMdBspAQqyqb+WvKTn/xE6wXpB+xpp5121113ac2ZSF3U4XYS1LeypuxBAAEEEEAAAQQQQAABowSY425UOggGAQQQQAABBBBAAIHyBSjcy3dhLwIIIIAAAggggAACRglQuBuVDoJBAAEEEEAAAQQQQKB8AQr38l3YiwACCCCAAAIIIICAUQIU7kalg2AQQAABBBBAAAEEEChfgMK9fBf2IoAAAggggAACCCBglACFu1HpIBgEEEAAAQQQQAABBMoXoHAv34W9CCCAAAIIIIAAAggYJUDhblQ6CAYBBBCoocCuXbtuu+02fROzvoawWbNm/fr1++c//6m2EhISXn/99Ro2ymkIIIAAAiYJJJsUDLEggAACCNRQ4Oqrr87Ly3vhhRdOOeWUHTt2vPfeez/88EMN2+I0BBBAAAEjBRhxNzItBIUAAghUR2Dv3r3/+Mc/fve73/30pz9t06ZNt27dJk2a9Itf/KJt27Zq5sorr9S4u7etp2+88cZ5552XlpamEn/69OkFBQXepXTM008/3b9///T0dL306quvevv1+8CYMWOaN2+uU9T47Nmzvf38FwEEEEAgxgIU7jEG53IIIIBA5AUalDw0Jebw4cOhrX/00Ud6+vzzz2dlZXnbqu+HDRs2duzYzz///JlnnlmwYMHMmTP9U6ZMmaKR+08++WTo0KHXX3/9hg0b9NLjjz/+5z//ecmSJV988cWLL77o/wLgn8UGAggggEBsBBKKi4tjcyWuggACCCAQPYE//vGPI0aMOHTokEbTL774YpXdHTp00OU0jv7aa68NGjTIu3Tv3r0vvfRSjcd7TxcuXPjrX/9627Zt3pGjRo3SoLv30gUXXKCm5s6de8cdd3z22Wd//etf1ZT3Ev9FAAEEEIiLACPucWHnoggggECEBTRSrvpbQ+OXXXbZ8uXLVXNrNL3sNTSafu+993oj9Pqvan0Nxufk5HhH9ujRwz9F296I+/Dhw9etW3fmmWeqgl+6dKl/ABsIIIAAAjEWoHCPM
TiXQwABBKIloDnoffr00XSXFStWqNqeNm1a2SsdPHhQ89pViHuP9evXb9q0SSeWPdLfo98Bvv766xkzZmg4/7rrrrvmmmv8l9hAAAEEEIilAIV7LLW5FgIIIBAjgXbt2mVnZ+tiKSkphYWF/lVVhWuq+mk/fiQmHv23YNWqVf6R2j777LO9p40aNRo8ePCzzz67ePFizcnZs2ePfxgbCCCAAAIxE2A5yJhRcyEEEEAgWgJa+fHaa6/95S9/qXntDRs2/Pjjjx944IGBAwfqerqXVEtD9uzZU+u7N27ceOrUqT//+c+13LsGzlWva+bMp59+et9993mRvfLKK127du3Vq5duQl29evX8+fO1f86cOVpSpnPnzjpeB2iR+MzMzGj1hHYRQAABBCoWoHCv2IZXEEAAAUsENFu9e/fujzzyyJdffpmfn9+6dWtNXp88ebLCf/jhh8ePH6/B8pYtW37zzTf6YqY333xT09y1dqQG488666xbb73V76Vm0SxatGj06NGq1F9++WUN2+sl/SagXwM0oyYpKen8889/6623/BF6/0Q2EEAAAQRiIMCqMjFA5hIIIICABQJh689YEDEhIoAAAgETYI57wBJOdxFAAAEEEEAAAQTsFKBwtzNvRI0AAggggAACCCAQMAGmygQs4XQXAQQQQAABBBBAwE4BRtztzBtRI4AAAggggAACCARMgMI9YAmnuwgggAACCCCAAAJ2ClC425k3okYAAQQQQAABBBAImACFe8ASTncRQAABBBBAAAEE7BSgcLczb0SNAAIIIIAAAgggEDABCveAJZzuIoAAAggggAACCNgpQOFuZ96IGgEEEEAAAQQQQCBgAhTuAUs43UUAAQQQQAABBBCwU4DC3c68ETUCCCCAAAIIIIBAwAQo3AOWcLqLAAIIIIAAAgggYKcAhbudeSNqBBBAAAEEEEAAgYAJULgHLOF0FwEEEEAAAQQQQMBOAQp3O/NG1AgggAACCCCAAAIBE6BwD1jC6S4CCCCAAAIIIICAnQIU7nbmjagRQAABBBBAAAEEAiZA4R6whNNdBBBAAAEEEEAAATsFKNztzBtRI4AAAggggAACCARMgMI9YAmnuwgggAACCCCAAAJ2ClC425k3okYAAQQQQAABBBAImACFe8ASTncRQAABBBBAAAEE7BSgcLczb0SNAAIIIIAAAgggEDABCveAJZzuIoAAAggggAACCNgp8P8BhKg2I1BqsPcAAAAASUVORK5CYII=)\n" + ], + "metadata": { + "id": "xHF95Kr4CzGq" + } + }, + { + "cell_type": "markdown", + "source": [ + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq` from the extra packages.\n", + "\n", + " ```bash\n", + " pip install adalflow[openai,groq]\n", + " ```\n", + "2. Setup `openai` and `groq` API key in the environment variables\n", + "\n", + "You can choose to use different client. You can import the model client you prefer. We support `Anthropic`, `Cohere`, `Google`, `GROQ`, `OpenAI`, `Transformer` and more in development. 
We will use OpenAI here as an example.Please refer to our [full installation guide](https://adalflow.sylph.ai/get_started/installation.html)" + ], + "metadata": { + "id": "Kof5M6DRaKhh" + } + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "id": "tAp3eDjOCma1" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai] # also install the package for the model client you'll use\n", + "!pip install datasets\n", + "clear_output()" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Set Environment Variables\n", + "\n", + "Run the following code and pass your api key.\n", + "\n", + "Note: for normal `.py` projects, follow our [official installation guide](https://lightrag.sylph.ai/get_started/installation.html).\n", + "\n", + "*Go to [OpenAI](https://platform.openai.com/docs/introduction) to get API keys if you don't already have.*" + ], + "metadata": { + "id": "KapUyHMM07pJ" + } + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + "\n", + "# Set environment variables\n", + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ONfzF9Puzdd_", + "outputId": "e5c3cfc5-69cb-448a-c248-a8cebda5ba71" + }, + "execution_count": 43, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "API keys have been set.\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from dataclasses import dataclass, field\n", + "from typing import List, Dict, Union, Optional, Tuple, Any, Callable\n", + "from datasets import load_dataset\n", + "from adalflow.components.model_client import OpenAIClient\n", + "import adalflow as adal\n", + "from adalflow.core.component import Component\n", + "from adalflow.datasets.types import TrecData\n", + "from adalflow.eval.answer_match_acc import AnswerMatchAcc\n", + "\n", + "\n", + "_COARSE_LABELS = [\n", + " \"ABBR\",\n", + " \"DESC\",\n", + " \"ENTY\",\n", + " \"HUM\",\n", + " \"LOC\",\n", + " \"NUM\"\n", + "]\n", + "\n", + "_COARSE_LABELS_DESC = [\n", + " \"Abbreviation: Questions about abbreviations and their meanings\",\n", + " \"Description: Questions seeking descriptions of people, things, or concepts\",\n", + " \"Entity: Questions about entities (e.g., animals, colors, inventions)\",\n", + " \"Human: Questions about people or organizations\",\n", + " \"Location: Questions about places, cities, countries\",\n", + " \"Numeric: Questions seeking numeric answers (e.g., dates, amounts, distances)\"\n", + "]\n", + "\n", + "\n", + "template = r\"\"\"\n", + " {{system_prompt}}\n", + " {% if output_format_str is not none %}\n", + " {{output_format_str}}\n", + " {% endif %}\n", + " {% if few_shot_demos is not none %}\n", + " Here are some examples:\n", + " {{few_shot_demos}}\n", + " {% endif %}\n", + " \n", + " \n", + " {{input_str}}\n", + " \n", + " \"\"\"\n", + "\n", + "task_desc_template = r\"\"\"You are a classifier. Given a question, you need to classify it into one of the following classes:\n", + " Format: class_index. class_name, class_description\n", + " {% if classes %}\n", + " {% for class in classes %}\n", + " {{loop.index-1}}. 
{{class.label}}, {{class.desc}}\n", + " {% endfor %}\n", + " {% endif %}\n", + " - Do not try to answer the question:\n", + " \"\"\"\n", + "\n", + "@dataclass\n", + "class TRECExtendedData(TrecData):\n", + " rationale: str = field(\n", + " metadata={\n", + " \"desc\": \"Your step-by-step reasoning to classify the question to class_name\"\n", + " },\n", + " default=None,\n", + " )\n", + " __input_fields__ = [\"question\"]\n", + " __output_fields__ = [\"rationale\", \"class_name\"] # it is important to have the rationale before the class_name" + ], + "metadata": { + "id": "ZZIEtZYHNVjo" + }, + "execution_count": 49, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "class TRECClassifierStructuredOutput(adal.Component):\n", + "\n", + " def __init__(self, model_client: adal.ModelClient, model_kwargs: Dict):\n", + " super().__init__()\n", + "\n", + " label_desc = [\n", + " {\"label\": label, \"desc\": desc}\n", + " for label, desc in zip(_COARSE_LABELS, _COARSE_LABELS_DESC)\n", + " ]\n", + "\n", + " task_desc_str = adal.Prompt(\n", + " template=task_desc_template, prompt_kwargs={\"classes\": label_desc}\n", + " )()\n", + "\n", + " self.data_class = TRECExtendedData\n", + " self.data_class.set_task_desc(task_desc_str)\n", + "\n", + " self.parser = adal.DataClassParser(\n", + " data_class=self.data_class, return_data_class=True, format_type=\"yaml\"\n", + " )\n", + "\n", + " prompt_kwargs = {\n", + " \"system_prompt\": adal.Parameter(\n", + " data=self.parser.get_task_desc_str(),\n", + " role_desc=\"Task description\",\n", + " requires_opt=True,\n", + " param_type=adal.ParameterType.PROMPT,\n", + " ),\n", + " \"output_format_str\": adal.Parameter(\n", + " data=self.parser.get_output_format_str(),\n", + " role_desc=\"Output format requirements\",\n", + " requires_opt=False,\n", + " param_type=adal.ParameterType.PROMPT,\n", + " ),\n", + " \"few_shot_demos\": adal.Parameter(\n", + " data=None,\n", + " requires_opt=True,\n", + " role_desc=\"Few shot examples to help the model\",\n", + " param_type=adal.ParameterType.DEMOS,\n", + " ),\n", + " }\n", + "\n", + " self.llm = adal.Generator(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " prompt_kwargs=prompt_kwargs,\n", + " template=template,\n", + " output_processors=self.parser,\n", + " use_cache=True,\n", + " )\n", + "\n", + " def _prepare_input(self, question: str):\n", + " input_data = self.data_class(question=question)\n", + " input_str = self.parser.get_input_str(input_data)\n", + " prompt_kwargs = {\n", + " \"input_str\": adal.Parameter(\n", + " data=input_str, requires_opt=False, role_desc=\"input to the LLM\"\n", + " )\n", + " }\n", + " return prompt_kwargs\n", + "\n", + " def call(\n", + " self, question: str, id: Optional[str] = None\n", + " ) -> Union[adal.GeneratorOutput, adal.Parameter]:\n", + " prompt_kwargs = self._prepare_input(question)\n", + " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", + " return output" + ], + "metadata": { + "id": "3Q3H9XC4Ncfi" + }, + "execution_count": 50, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "class TrecClassifierAdal(adal.AdalComponent):\n", + " def __init__(\n", + " self,\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + " teacher_model_config: Dict,\n", + " backward_engine_model_config: Dict,\n", + " text_optimizer_model_config: Dict,\n", + " ):\n", + " task = TRECClassifierStructuredOutput(model_client, model_kwargs)\n", + " eval_fn = AnswerMatchAcc(type=\"exact_match\").compute_single_item\n", + " 
loss_fn = adal.EvalFnToTextLoss(\n", + " eval_fn=eval_fn,\n", + " eval_fn_desc=\"exact_match: 1 if str(y) == str(y_gt) else 0\",\n", + " )\n", + " super().__init__(\n", + " task=task,\n", + " eval_fn=eval_fn,\n", + " loss_fn=loss_fn,\n", + " backward_engine_model_config=backward_engine_model_config,\n", + " text_optimizer_model_config=text_optimizer_model_config,\n", + " teacher_model_config=teacher_model_config,\n", + " )\n", + "\n", + " def prepare_task(self, sample: TRECExtendedData):\n", + " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", + "\n", + " def prepare_eval(\n", + " self, sample: TRECExtendedData, y_pred: adal.GeneratorOutput\n", + " ) -> float:\n", + " y_label = -1\n", + " if y_pred and y_pred.data is not None and y_pred.data.class_name is not None:\n", + " y_label = y_pred.data.class_name\n", + " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.class_name}\n", + "\n", + " def prepare_loss(\n", + " self, sample: TRECExtendedData, y_pred: adal.Parameter, *args, **kwargs\n", + " ) -> Tuple[Callable[..., Any], Dict]:\n", + " full_response = y_pred.full_response\n", + " y_label = -1\n", + " if (\n", + " full_response\n", + " and full_response.data is not None\n", + " and full_response.data.class_name is not None\n", + " ):\n", + " y_label = full_response.data.class_name\n", + "\n", + " y_pred.eval_input = y_label\n", + " y_gt = adal.Parameter(\n", + " name=\"y_gt\",\n", + " data=sample.class_name,\n", + " eval_input=sample.class_name,\n", + " requires_opt=False,\n", + " )\n", + " return self.loss_fn, {\"kwargs\": {\"y\": y_pred, \"y_gt\": y_gt}}" + ], + "metadata": { + "id": "HpkQYsh2NevT" + }, + "execution_count": 51, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def train(\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + " train_batch_size=4,\n", + " raw_shots: int = 0,\n", + " bootstrap_shots: int = 1,\n", + " max_steps=12,\n", + " num_workers=4,\n", + " strategy=\"constrained\",\n", + " optimization_order=\"sequential\",\n", + " debug=False,\n", + "):\n", + " print(\"Starting training process...\")\n", + "\n", + " # Define the model configuration for all components\n", + " gpt_4o_model = {\n", + " \"model\": \"gpt-4-turbo-preview\",\n", + " \"temperature\": 0,\n", + " \"max_tokens\": 1000,\n", + " \"top_p\": 1,\n", + " \"frequency_penalty\": 0,\n", + " \"presence_penalty\": 0\n", + " }\n", + " print(f\"Component model configuration: {gpt_4o_model}\")\n", + "\n", + " try:\n", + " print(\"Initializing ADAL component...\")\n", + " adal_component = TrecClassifierAdal(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " text_optimizer_model_config=gpt_4o_model,\n", + " backward_engine_model_config=gpt_4o_model,\n", + " teacher_model_config=gpt_4o_model,\n", + " )\n", + " print(\"ADAL component initialized successfully\")\n", + "\n", + " print(\"Initializing trainer...\")\n", + " trainer = adal.Trainer(\n", + " train_batch_size=train_batch_size,\n", + " adaltask=adal_component,\n", + " strategy=strategy,\n", + " max_steps=max_steps,\n", + " num_workers=num_workers,\n", + " raw_shots=raw_shots,\n", + " bootstrap_shots=bootstrap_shots,\n", + " debug=debug,\n", + " weighted_sampling=True,\n", + " optimization_order=optimization_order,\n", + " exclude_input_fields_from_bootstrap_demos=True,\n", + " )\n", + " print(\"Trainer initialized successfully\")\n", + "\n", + " print(\"Loading datasets...\")\n", + " train_dataset, val_dataset, test_dataset = load_datasets()\n", + " 
print(f\"Datasets loaded - Train size: {len(train_dataset)}, Val size: {len(val_dataset)}, Test size: {len(test_dataset)}\")\n", + "\n", + " print(\"Starting model training...\")\n", + " trainer.fit(\n", + " train_dataset=train_dataset,\n", + " val_dataset=test_dataset,\n", + " debug=debug,\n", + " )\n", + " print(\"Training completed successfully\")\n", + "\n", + " except Exception as e:\n", + " print(f\"Error occurred: {str(e)}\")\n", + " raise" + ], + "metadata": { + "id": "PEj6xiZ5dVaj" + }, + "execution_count": 52, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.components.model_client.openai_client import OpenAIClient\n", + "\n", + "\n", + "gpt_4o_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-4o-mini\",\n", + " \"max_tokens\": 2000,\n", + "\n", + " },\n", + "}\n", + "\n", + "\n", + "train(\n", + " model_client=OpenAIClient(),\n", + " model_kwargs=gpt_4o_model,\n", + " )" + ], + "metadata": { + "id": "GnlZBQOMEj6E", + "collapsed": true + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "# Issues and feedback\n", + "\n", + "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", + "\n", + "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." + ], + "metadata": { + "id": "AmkbyxmuruUu" + } + } + ] +} diff --git a/notebooks/tutorials/adalflow_rag_optimization.ipynb b/notebooks/tutorials/adalflow_rag_optimization.ipynb new file mode 100644 index 00000000..7ae0b152 --- /dev/null +++ b/notebooks/tutorials/adalflow_rag_optimization.ipynb @@ -0,0 +1,495 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The PyTorch library to auto-optimize any LLM task pipelines\n", + "\n", + "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "## 📖 Outline\n", + "\n", + "In this tutorial, we will cover the auto-optimization of a standard RAG:\n", + "\n", + "- Introducing HotPotQA dataset and HotPotQAData class.\n", + "\n", + "- Convert Dspy’s Retriever to AdalFlow’s Retriever to easy comparison.\n", + "\n", + "- Build the standard RAG with Retriever and Generator components.\n", + "\n", + "- Learn how to connect the output-input between components to enable auto-text-grad optimization." + ], + "metadata": { + "id": "xHF95Kr4CzGq" + } + }, + { + "cell_type": "markdown", + "source": [ + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. 
We will need `openai`, `groq` from the extra packages.\n", + "\n", + " ```bash\n", + " pip install adalflow[openai,groq]\n", + " ```\n", + "2. Setup `openai` and `groq` API key in the environment variables\n", + "\n", + "You can choose to use different client. You can import the model client you prefer. We support `Anthropic`, `Cohere`, `Google`, `GROQ`, `OpenAI`, `Transformer` and more in development. We will use OpenAI here as an example.Please refer to our [full installation guide](https://adalflow.sylph.ai/get_started/installation.html)" + ], + "metadata": { + "id": "Kof5M6DRaKhh" + } + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "id": "tAp3eDjOCma1" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai] # also install the package for the model client you'll use\n", + "!pip install dspy\n", + "!pip install datasets\n", + "clear_output()" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Set Environment Variables\n", + "\n", + "Run the following code and pass your api key.\n", + "\n", + "Note: for normal `.py` projects, follow our [official installation guide](https://lightrag.sylph.ai/get_started/installation.html).\n", + "\n", + "*Go to [OpenAI](https://platform.openai.com/docs/introduction) to get API keys if you don't already have.*" + ], + "metadata": { + "id": "KapUyHMM07pJ" + } + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + "\n", + "# Set environment variables\n", + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ONfzF9Puzdd_", + "outputId": "5fc0cd30-9ae7-443a-c06c-31e9edeafd69" + }, + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "API keys have been set.\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "import dspy\n", + "import re\n", + "from typing import List, Union, Optional, Dict, Callable, Any, Tuple\n", + "from dataclasses import dataclass, field\n", + "import adalflow as adal\n", + "from adalflow.optim.parameter import Parameter, ParameterType\n", + "from adalflow.datasets.hotpot_qa import HotPotQA, HotPotQAData\n", + "from adalflow.datasets.types import Example\n", + "from adalflow.core.types import RetrieverOutput\n", + "from adalflow.core import Component, Generator\n", + "from adalflow.core.retriever import Retriever\n", + "from adalflow.core.component import fun_to_component\n", + "from adalflow.components.model_client.openai_client import OpenAIClient" + ], + "metadata": { + "id": "aE3I05BqOmd7" + }, + "execution_count": 20, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "\n", + "gpt_4o_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-4o-mini\",\n", + " \"max_tokens\": 2000,\n", + " },\n", + "}\n", + "\n", + "gpt_3_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"max_tokens\": 2000,\n", + " },\n", + "}" + ], + "metadata": { + "id": "cqUUoua9fUxQ" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def 
load_datasets():\n", + "\n", + " trainset = HotPotQA(split=\"train\", size=20)\n", + " valset = HotPotQA(split=\"val\", size=50)\n", + " testset = HotPotQA(split=\"test\", size=50)\n", + " print(f\"trainset, valset: {len(trainset)}, {len(valset)}, example: {trainset[0]}\")\n", + " return trainset, valset, testset\n", + "\n", + "\n", + "@dataclass\n", + "class AnswerData(adal.DataClass):\n", + " reasoning: str = field(\n", + " metadata={\"desc\": \"The reasoning to produce the answer\"},\n", + " )\n", + " answer: str = field(\n", + " metadata={\"desc\": \"The answer you produced\"},\n", + " )\n", + "\n", + " __output_fields__ = [\"reasoning\", \"answer\"]\n", + "\n", + "\n", + "dataset = HotPotQA(split=\"train\", size=20)\n", + "print(dataset[0], type(dataset[0]))\n", + "\n", + "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "0irHeHUkOmL8", + "outputId": "61f778a2-9ec1-4fda-daa2-bcc7f31baa78" + }, + "execution_count": 22, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\") \n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\")" + ] + }, + "metadata": {}, + "execution_count": 22 + } + ] + }, + { + "cell_type": "code", + "source": [ + "class DspyRetriever(adal.Retriever):\n", + " def __init__(self, top_k: int = 3):\n", + " super().__init__()\n", + " self.top_k = top_k\n", + " self.dspy_retriever = dspy.Retrieve(k=top_k)\n", + "\n", + " def call(self, input: str, top_k: Optional[int] = None) -> List[adal.RetrieverOutput]:\n", + "\n", + " k = top_k or self.top_k\n", + "\n", + " output = self.dspy_retriever(query_or_queries=input, k=k)\n", + " final_output: List[RetrieverOutput] = []\n", + " documents = output.passages\n", + "\n", + " final_output.append(\n", + " RetrieverOutput(\n", + " query=input,\n", + " documents=documents,\n", + " doc_indices=[],\n", + " )\n", + " )\n", + " return final_output\n", + "\n", + "def test_retriever():\n", + " question = \"How many storeys are in the castle that David Gregory inherited?\"\n", + " retriever = DspyRetriever(top_k=3)\n", + " retriever_out = retriever(input=question)\n", + " print(f\"retriever_out: {retriever_out}\")\n", + "\n", + "\n", + "def call(\n", + " self, question: str, id: Optional[str] = None\n", + " ) -> Union[adal.GeneratorOutput, adal.Parameter]:\n", + " prompt_kwargs = self._prepare_input(question)\n", + " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", + " return output\n", + "\n", + "\n", + "def call(self, question: str, id: str = None) -> adal.GeneratorOutput:\n", + " if self.training:\n", + " raise ValueError(\n", + " \"This component is not supposed to be called in training mode\"\n", + " )\n", + "\n", + " retriever_out = self.retriever.call(input=question)\n", + "\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x[0].documents) if x and x[0] and x[0].documents else \"\"\n", + " )\n", + " retrieved_context = successor_map_fn(retriever_out)\n", + "\n", + " prompt_kwargs = 
{\n", + " \"context\": retrieved_context,\n", + " \"question\": question,\n", + " }\n", + "\n", + " output = self.llm.call(\n", + " prompt_kwargs=prompt_kwargs,\n", + " id=id,\n", + " )\n", + " return output\n", + "\n", + "\n", + "def forward(self, question: str, id: str = None) -> adal.Parameter:\n", + " if not self.training:\n", + " raise ValueError(\"This component is not supposed to be called in eval mode\")\n", + " retriever_out = self.retriever.forward(input=question)\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x.data[0].documents)\n", + " if x.data and x.data[0] and x.data[0].documents\n", + " else \"\"\n", + " )\n", + " retriever_out.add_successor_map_fn(successor=self.llm, map_fn=successor_map_fn)\n", + " generator_out = self.llm.forward(\n", + " prompt_kwargs={\"question\": question, \"context\": retriever_out}, id=id\n", + " )\n", + " return generator_out\n", + "\n", + "\n", + "def bicall(\n", + " self, question: str, id: str = None\n", + ") -> Union[adal.GeneratorOutput, adal.Parameter]:\n", + " \"\"\"You can also combine both the forward and call in the same function.\n", + " Supports both training and eval mode by using __call__ for GradComponents\n", + " like Retriever and Generator\n", + " \"\"\"\n", + " retriever_out = self.retriever(input=question)\n", + " if isinstance(retriever_out, adal.Parameter):\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x.data[0].documents)\n", + " if x.data and x.data[0] and x.data[0].documents\n", + " else \"\"\n", + " )\n", + " retriever_out.add_successor_map_fn(\n", + " successor=self.llm, map_fn=successor_map_fn\n", + " )\n", + " else:\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x[0].documents) if x and x[0] and x[0].documents else \"\"\n", + " )\n", + " retrieved_context = successor_map_fn(retriever_out)\n", + " prompt_kwargs = {\n", + " \"context\": retrieved_context,\n", + " \"question\": question,\n", + " }\n", + " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", + " return output\n", + "\n", + "task_desc_str = r\"\"\"Answer questions with short factoid answers.\n", + "\n", + "You will receive context(may contain relevant facts) and a question.\n", + "Think step by step.\"\"\"\n", + "\n", + "\n", + "class VanillaRAG(adal.GradComponent):\n", + " def __init__(self, passages_per_hop=3, model_client=None, model_kwargs=None):\n", + " super().__init__()\n", + "\n", + " self.passages_per_hop = passages_per_hop\n", + "\n", + " self.retriever = DspyRetriever(top_k=passages_per_hop)\n", + " self.llm_parser = adal.DataClassParser(\n", + " data_class=AnswerData, return_data_class=True, format_type=\"json\"\n", + " )\n", + " self.llm = Generator(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " prompt_kwargs={\n", + " \"task_desc_str\": adal.Parameter(\n", + " data=task_desc_str,\n", + " role_desc=\"Task description for the language model\",\n", + " param_type=adal.ParameterType.PROMPT,\n", + " ),\n", + " \"few_shot_demos\": adal.Parameter(\n", + " data=None,\n", + " requires_opt=True,\n", + " role_desc=\"To provide few shot demos to the language model\",\n", + " param_type=adal.ParameterType.DEMOS,\n", + " ),\n", + " \"output_format_str\": self.llm_parser.get_output_format_str(),\n", + " },\n", + " template=answer_template,\n", + " output_processors=self.llm_parser,\n", + " use_cache=True,\n", + " )\n", + "\n", + "\n", + "class VallinaRAGAdal(adal.AdalComponent):\n", + " def __init__(\n", + " self,\n", + " model_client: 
adal.ModelClient,\n", + " model_kwargs: Dict,\n", + " backward_engine_model_config: Dict | None = None,\n", + " teacher_model_config: Dict | None = None,\n", + " text_optimizer_model_config: Dict | None = None,\n", + " ):\n", + " task = VanillaRAG(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " passages_per_hop=3,\n", + " )\n", + " eval_fn = AnswerMatchAcc(type=\"fuzzy_match\").compute_single_item\n", + " loss_fn = adal.EvalFnToTextLoss(\n", + " eval_fn=eval_fn, eval_fn_desc=\"fuzzy_match: 1 if str(y) in str(y_gt) else 0\"\n", + " )\n", + " super().__init__(\n", + " task=task,\n", + " eval_fn=eval_fn,\n", + " loss_fn=loss_fn,\n", + " backward_engine_model_config=backward_engine_model_config,\n", + " teacher_model_config=teacher_model_config,\n", + " text_optimizer_model_config=text_optimizer_model_config,\n", + " )\n", + "\n", + " # tell the trainer how to call the task\n", + " def prepare_task(self, sample: HotPotQAData) -> Tuple[Callable[..., Any], Dict]:\n", + " if self.task.training:\n", + " return self.task.forward, {\"question\": sample.question, \"id\": sample.id}\n", + " else:\n", + " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", + "\n", + "\n", + " # eval mode: get the generator output, directly engage with the eval_fn\n", + " def prepare_eval(self, sample: HotPotQAData, y_pred: adal.GeneratorOutput) -> float:\n", + " y_label = \"\"\n", + " if y_pred and y_pred.data and y_pred.data.answer:\n", + " y_label = y_pred.data.answer\n", + " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.answer}\n", + "\n", + "\n", + " # train mode: get the loss and get the data from the full_response\n", + " def prepare_loss(self, sample: HotPotQAData, pred: adal.Parameter):\n", + " # prepare gt parameter\n", + " y_gt = adal.Parameter(\n", + " name=\"y_gt\",\n", + " data=sample.answer,\n", + " eval_input=sample.answer,\n", + " requires_opt=False,\n", + " )\n", + "\n", + " # pred's full_response is the output of the task pipeline which is GeneratorOutput\n", + " pred.eval_input = (\n", + " pred.full_response.data.answer\n", + " if pred.full_response\n", + " and pred.full_response.data\n", + " and pred.full_response.data.answer\n", + " else \"\"\n", + " )\n", + " return self.loss_fn, {\"kwargs\": {\"y\": pred, \"y_gt\": y_gt}}\n", + "\n", + "def train_diagnose(\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + ") -> Dict:\n", + "\n", + " trainset, valset, testset = load_datasets()\n", + "\n", + " adal_component = VallinaRAGAdal(\n", + " model_client,\n", + " model_kwargs,\n", + " backward_engine_model_config=gpt_4o_model,\n", + " teacher_model_config=gpt_3_model,\n", + " text_optimizer_model_config=gpt_3_model,\n", + " )\n", + " trainer = adal.Trainer(adaltask=adal_component)\n", + " trainer.diagnose(dataset=trainset, split=\"train\")\n", + " # trainer.diagnose(dataset=valset, split=\"val\")\n", + " # trainer.diagnose(dataset=testset, split=\"test\")\n" + ], + "metadata": { + "id": "ZZIEtZYHNVjo" + }, + "execution_count": 23, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "# Issues and feedback\n", + "\n", + "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", + "\n", + "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." 
+ ], + "metadata": { + "id": "AmkbyxmuruUu" + } + } + ] +} From fddb564d22d235067aedd6fe8469e28bbe5f34c2 Mon Sep 17 00:00:00 2001 From: Ajith Kumar V Date: Thu, 12 Dec 2024 06:34:20 +0000 Subject: [PATCH 35/40] feat: add rag basic tutuorials --- .../tutorials/adalflow_rag_documents.ipynb | 443 ++++++++++++++++++ .../tutorials/adalflow_rag_vanilla.ipynb | 376 +++++++++++++++ tutorials/adalflow_rag_documents.py | 248 ++++++++++ tutorials/adalflow_rag_vanilla.py | 188 ++++++++ ...nescent Guardians of the Desert Canyon.txt | 6 + ...nigmatic Crystal Cavern of Lake Aurora.txt | 6 + .../The Legend of the Moonshade Willow.txt | 6 + 7 files changed, 1273 insertions(+) create mode 100644 notebooks/tutorials/adalflow_rag_documents.ipynb create mode 100644 notebooks/tutorials/adalflow_rag_vanilla.ipynb create mode 100644 tutorials/adalflow_rag_documents.py create mode 100644 tutorials/adalflow_rag_vanilla.py create mode 100644 tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt create mode 100644 tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt create mode 100644 tutorials/assets/documents/The Legend of the Moonshade Willow.txt diff --git a/notebooks/tutorials/adalflow_rag_documents.ipynb b/notebooks/tutorials/adalflow_rag_documents.ipynb new file mode 100644 index 00000000..373f6bae --- /dev/null +++ b/notebooks/tutorials/adalflow_rag_documents.ipynb @@ -0,0 +1,443 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The PyTorch library to auto-optimize any LLM task pipelines\n", + "\n", + "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "# Author\n", + "This notebook was created by community contributor [Ajith](https://github.com/ajithvcoder/).\n", + "\n", + "# Outline\n", + "\n", + "This is a quick introduction of what AdalFlow is capable of. We will cover:\n", + "\n", + "* How to use adalflow for rag with documents\n", + "\n", + "Adalflow can be used in a genric manner for any api provider without worrying much about prompt, \n", + "model args and parsing results\n", + "\n", + "**Next: Try our [adalflow-text-splitter](\"https://colab.research.google.com/github.com/SylphAI-Inc/AdalFlow/blob/main/notebooks/tutorials/adalflow_text_splitter.ipynb\")**\n", + "\n", + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq`, and `faiss`(cpu version) from the extra packages.\n", + "\n", + " ```bash\n", + " pip install torch --index-url https://download.pytorch.org/whl/cpu\n", + " pip install sentence-transformers==3.3.1\n", + " pip install adalflow[openai,groq,faiss-cpu]\n", + " ```\n", + "2. 
Setup `openai` and `groq` API key in the environment variables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set Environment Variables\n", + "\n", + "Note: Enter your api keys in below cell #todo" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Overwriting .env\n" + ] + } + ], + "source": [ + "%%writefile .env\n", + "\n", + "OPENAI_API_KEY=\"PASTE-OPENAI_API_KEY_HERE\"\n", + "GROQ_API_KEY=\"PASTE-GROQ_API_KEY-HERE\"" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "from adalflow.utils import setup_env\n", + "\n", + "# Load environment variables - Make sure to have OPENAI_API_KEY in .env file and .env is present in current folder\n", + "setup_env(\".env\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/workspace/ajithdev/AdalFlow/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "import os\n", + "import tiktoken\n", + "from typing import List, Dict, Tuple\n", + "import numpy as np\n", + "from sentence_transformers import SentenceTransformer\n", + "from faiss import IndexFlatL2\n", + "\n", + "from adalflow.components.model_client import GroqAPIClient, OpenAIClient\n", + "from adalflow.core.types import ModelType\n", + "from adalflow.utils import setup_env" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`AdalflowRAGPipeline` is a class that implements a Retrieval-Augmented Generation (RAG) pipeline with adalflow using documents. It has:\n", + "\n", + "- Efficient RAG Pipeline for handling large text files, embedding, and retrieval.\n", + "- Supports token management and context truncation for LLM integration.\n", + "- Generates accurate responses using retrieval-augmented generation (RAG)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "class AdalflowRAGPipeline:\n", + " def __init__(self,\n", + " model_client=None,\n", + " model_kwargs=None,\n", + " embedding_model='all-MiniLM-L6-v2', \n", + " vector_dim=384, \n", + " top_k_retrieval=3,\n", + " max_context_tokens=800):\n", + " \"\"\"\n", + " Initialize RAG Pipeline for handling large text files\n", + " \n", + " Args:\n", + " embedding_model (str): Sentence transformer model for embeddings\n", + " vector_dim (int): Dimension of embedding vectors\n", + " top_k_retrieval (int): Number of documents to retrieve\n", + " max_context_tokens (int): Maximum tokens to send to LLM\n", + " \"\"\"\n", + " # Initialize model client for generation\n", + " self.model_client = model_client\n", + " \n", + " # Initialize tokenizer for precise token counting\n", + " self.tokenizer = tiktoken.get_encoding(\"cl100k_base\")\n", + " \n", + " # Initialize embedding model\n", + " self.embedding_model = SentenceTransformer(embedding_model)\n", + " \n", + " # Initialize FAISS index for vector similarity search\n", + " self.index = IndexFlatL2(vector_dim)\n", + " \n", + " # Store document texts, embeddings, and metadata\n", + " self.documents = []\n", + " self.document_embeddings = []\n", + " self.document_metadata = []\n", + " \n", + " # Retrieval and context management parameters\n", + " self.top_k_retrieval = top_k_retrieval\n", + " self.max_context_tokens = max_context_tokens\n", + " \n", + " # Model generation parameters\n", + " self.model_kwargs = model_kwargs\n", + "\n", + " def load_text_file(self, file_path: str) -> List[str]:\n", + " \"\"\"\n", + " Load a large text file and split into manageable chunks\n", + " \n", + " Args:\n", + " file_path (str): Path to the text file\n", + " \n", + " Returns:\n", + " List[str]: List of document chunks\n", + " \"\"\"\n", + " with open(file_path, 'r', encoding='utf-8') as file:\n", + " # Read entire file\n", + " content = file.read()\n", + " \n", + " # Split content into chunks (e.g., 10 lines per chunk)\n", + " lines = content.split('\\n')\n", + " chunks = []\n", + " chunk_size = 10 # Adjust based on your file structure\n", + " \n", + " for i in range(0, len(lines), chunk_size):\n", + " chunk = '\\n'.join(lines[i:i+chunk_size])\n", + " chunks.append(chunk)\n", + " \n", + " return chunks\n", + "\n", + " def add_documents_from_directory(self, directory_path: str):\n", + " \"\"\"\n", + " Add documents from all text files in a directory\n", + " \n", + " Args:\n", + " directory_path (str): Path to directory containing text files\n", + " \"\"\"\n", + " for filename in os.listdir(directory_path):\n", + " if filename.endswith('.txt'):\n", + " file_path = os.path.join(directory_path, filename)\n", + " document_chunks = self.load_text_file(file_path)\n", + " \n", + " for chunk in document_chunks:\n", + " # Embed document chunk\n", + " embedding = self.embedding_model.encode(chunk)\n", + " \n", + " # Add to index and document store\n", + " self.index.add(np.array([embedding]))\n", + " self.documents.append(chunk)\n", + " self.document_embeddings.append(embedding)\n", + " self.document_metadata.append({\n", + " 'filename': filename,\n", + " 'chunk_index': len(self.document_metadata)\n", + " })\n", + "\n", + " def count_tokens(self, text: str) -> int:\n", + " \"\"\"\n", + " Count tokens in a given text\n", + " \n", + " Args:\n", + " text (str): Input text\n", + " \n", + " Returns:\n", + " int: Number of tokens\n", + " \"\"\"\n", + " return 
len(self.tokenizer.encode(text))\n", + "\n", + " def retrieve_and_truncate_context(self, query: str) -> str:\n", + " \"\"\"\n", + " Retrieve relevant documents and truncate to fit token limit\n", + " \n", + " Args:\n", + " query (str): Input query\n", + " \n", + " Returns:\n", + " str: Concatenated context within token limit\n", + " \"\"\"\n", + " # Retrieve relevant documents\n", + " query_embedding = self.embedding_model.encode(query)\n", + " distances, indices = self.index.search(\n", + " np.array([query_embedding]), \n", + " self.top_k_retrieval\n", + " )\n", + " \n", + " # Collect and truncate context\n", + " context = []\n", + " current_tokens = 0\n", + " \n", + " for idx in indices[0]:\n", + " doc = self.documents[idx]\n", + " doc_tokens = self.count_tokens(doc)\n", + " \n", + " # Check if adding this document would exceed token limit\n", + " if current_tokens + doc_tokens <= self.max_context_tokens:\n", + " context.append(doc)\n", + " current_tokens += doc_tokens\n", + " else:\n", + " break\n", + " \n", + " return \"\\n\\n\".join(context)\n", + "\n", + " def generate_response(self, query: str) -> str:\n", + " \"\"\"\n", + " Generate a response using retrieval-augmented generation\n", + " \n", + " Args:\n", + " query (str): User's input query\n", + " \n", + " Returns:\n", + " str: Generated response incorporating retrieved context\n", + " \"\"\"\n", + " # Retrieve and truncate context\n", + " retrieved_context = self.retrieve_and_truncate_context(query)\n", + " \n", + " # Construct context-aware prompt\n", + " full_prompt = f\"\"\"\n", + " Context Documents:\n", + " {retrieved_context}\n", + " \n", + " Query: {query}\n", + " \n", + " Generate a comprehensive response that:\n", + " 1. Directly answers the query\n", + " 2. Incorporates relevant information from the context documents\n", + " 3. Provides clear and concise information\n", + " \"\"\"\n", + " \n", + " # Prepare API arguments\n", + " api_kwargs = self.model_client.convert_inputs_to_api_kwargs(\n", + " input=full_prompt,\n", + " model_kwargs=self.model_kwargs,\n", + " model_type=ModelType.LLM\n", + " )\n", + " \n", + " # Call API and parse response\n", + " response = self.model_client.call(\n", + " api_kwargs=api_kwargs, \n", + " model_type=ModelType.LLM\n", + " )\n", + " response_text = self.model_client.parse_chat_completion(response)\n", + " \n", + " return response_text\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`run_rag_pipeline` demonstrates how to use the AdalflowRAGPipeline to handle retrieval-augmented generation. It initializes the pipeline with specified retrieval and context token limits, loads documents from a directory, and processes a list of queries. For each query, the function retrieves relevant context, generates a response using the pipeline, and prints the results." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "def run_rag_pipeline(model_client, model_kwargs, documents, queries):\n", + "\n", + " # Example usage of RAG pipeline\n", + " rag_pipeline = AdalflowRAGPipeline(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " top_k_retrieval=1, # Retrieve top 1 most relevant chunks\n", + " max_context_tokens=800 # Limit context to 1500 tokens\n", + " )\n", + "\n", + " # Add documents from a directory of text files\n", + " rag_pipeline.add_documents_from_directory(documents)\n", + " \n", + " # Generate responses\n", + " for query in queries:\n", + " print(f\"\\nQuery: {query}\")\n", + " response = rag_pipeline.generate_response(query)\n", + " print(f\"Response: {response}\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Query: What year was the Crystal Cavern discovered?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=14, prompt_tokens=203, total_tokens=217), raw_response='The Crystal Cavern was discovered in 1987 by divers.', metadata=None)\n", + "\n", + "Query: What is the name of the rare tree in Elmsworth?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=212, total_tokens=229), raw_response='The rare tree in Elmsworth is known as the \"Moonshade Willow\".', metadata=None)\n", + "\n", + "Query: What local legend claim that Lunaflits surrounds?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=19, prompt_tokens=206, total_tokens=225), raw_response='Local legend claims that Lunaflits are guardians of ancient treasure buried deep within the canyon.', metadata=None)\n", + "\n", + "Query: What year was the Crystal Cavern discovered?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=107, prompt_tokens=184, total_tokens=291), raw_response='The Crystal Cavern was discovered by divers in the year 1987 beneath the icy waters of Lake Aurora. The cavern is known for its shimmering quartz formations that refract sunlight into a spectrum of colors, and it is believed to have served as a sanctuary for an ancient civilization that revered the crystals as conduits to the spirit world. Artifacts recovered from the cavern are carved with intricate symbols, indicating a deep connection to celestial events. However, accessing the cavern is dangerous due to the freezing temperatures and strong currents of the lake.', metadata=None)\n", + "\n", + "Query: What is the name of the rare tree in Elmsworth?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=104, prompt_tokens=193, total_tokens=297), raw_response='The rare tree in Elmsworth is called the \"Moonshade Willow.\" It blooms once every seven years, emitting a soft glow from its blossoms. Villagers believe that meditating under its branches brings vivid dreams of the future. The tree\\'s bark contains a secret resin used in ancient healing rituals. Elders claim that the Moonshade Willow was a gift from a goddess to protect the village. 
Researchers have found that the tree can only thrive in Elmsworth\\'s unique soil, making it impossible to cultivate elsewhere.', metadata=None)\n", + "\n", + "Query: What local legend claim that Lunaflits surrounds?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=187, total_tokens=287), raw_response='Local legends claim that Lunaflits, the glowing insects found in the remote desert canyon, are believed to be guardians of ancient treasure buried deep within the canyon. These creatures emit a constant, soothing green light that illuminates the canyon at night, and their rhythmic light pulses form intricate patterns, suggesting a form of communication among them. The ethereal glow created by the Lunaflits and the rare moss reflecting their light have contributed to the mystical reputation of these insects as protectors of hidden riches.', metadata=None)\n" + ] + } + ], + "source": [ + "# setup_env()\n", + "\n", + "documents = '../../tutorials/assets/documents'\n", + "\n", + "queries = [\n", + " \"What year was the Crystal Cavern discovered?\",\n", + " \"What is the name of the rare tree in Elmsworth?\",\n", + " \"What local legend claim that Lunaflits surrounds?\"\n", + "]\n", + "\n", + "groq_model_kwargs = {\n", + " \"model\": \"llama-3.2-1b-preview\", # Use 16k model for larger context\n", + " \"temperature\": 0.1,\n", + " \"max_tokens\": 800,\n", + "}\n", + "\n", + "openai_model_kwargs = {\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"temperature\": 0.1,\n", + " \"max_tokens\": 800,\n", + "}\n", + "# Below example shows that adalflow can be used in a genric manner for any api provider\n", + "# without worrying about prompt and parsing results\n", + "run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries)\n", + "run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/tutorials/adalflow_rag_vanilla.ipynb b/notebooks/tutorials/adalflow_rag_vanilla.ipynb new file mode 100644 index 00000000..34a53174 --- /dev/null +++ b/notebooks/tutorials/adalflow_rag_vanilla.ipynb @@ -0,0 +1,376 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The PyTorch library to auto-optimize any LLM task pipelines\n", + "\n", + "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! 
⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "# Author\n", + "This notebook was created by community contributor [Ajith](https://github.com/ajithvcoder/).\n", + "\n", + "# Outline\n", + "\n", + "This is a quick introduction of what AdalFlow is capable of. We will cover:\n", + "\n", + "* How to use adalflow for rag\n", + "\n", + "Adalflow can be used in a genric manner for any api provider without worrying much about prompt, \n", + "model args and parsing results\n", + "\n", + "**Next: Try our [adalflow-rag-for-documents](\"https://colab.research.google.com/github.com/SylphAI-Inc/AdalFlow/blob/main/notebooks/tutorials/adalflow_rag_documents.ipynb\")**\n", + "\n", + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq`, and `faiss`(cpu version) from the extra packages.\n", + "\n", + " ```bash\n", + " pip install torch --index-url https://download.pytorch.org/whl/cpu\n", + " pip install sentence-transformers==3.3.1\n", + " pip install adalflow[openai,groq,faiss-cpu]\n", + " ```\n", + "2. Setup `openai` and `groq` API key in the environment variables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set Environment Variables\n", + "\n", + "Note: Enter your api keys in below cell #todo" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Overwriting .env\n" + ] + } + ], + "source": [ + "%%writefile .env\n", + "\n", + "OPENAI_API_KEY=\"PASTE-OPENAI_API_KEY_HERE\"\n", + "GROQ_API_KEY=\"PASTE-GROQ_API_KEY-HERE\"" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from adalflow.utils import setup_env\n", + "\n", + "# Load environment variables - Make sure to have OPENAI_API_KEY in .env file and .env is present in current folder\n", + "setup_env(\".env\")" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/workspace/ajithdev/AdalFlow/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "import os\n", + "from typing import List, Dict\n", + "import numpy as np\n", + "from sentence_transformers import SentenceTransformer\n", + "from faiss import IndexFlatL2\n", + "\n", + "from adalflow.components.model_client import GroqAPIClient, OpenAIClient\n", + "from adalflow.core.types import ModelType\n", + "from adalflow.utils import setup_env" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`AdalflowRAGPipeline` is a class that implements a Retrieval-Augmented Generation (RAG) pipeline with adalflow. 
It integrates:\n", + "\n", + "- Embedding models (e.g., Sentence Transformers) for document and query embeddings.\n", + "- FAISS for vector similarity search.\n", + "- A LLM client to generate context-aware responses using retrieved documents." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "class AdalflowRAGPipeline:\n", + " def __init__(self, \n", + " model_client = None,\n", + " model_kwargs = None,\n", + " embedding_model='all-MiniLM-L6-v2', \n", + " vector_dim=384, \n", + " top_k_retrieval=1):\n", + " \"\"\" \n", + " Initialize RAG Pipeline with embedding and retrieval components\n", + " \n", + " Args:\n", + " embedding_model (str): Sentence transformer model for embeddings\n", + " vector_dim (int): Dimension of embedding vectors\n", + " top_k_retrieval (int): Number of documents to retrieve\n", + " \"\"\"\n", + " # Initialize model client for generation\n", + " self.model_client = model_client\n", + " \n", + " # Initialize embedding model\n", + " self.embedding_model = SentenceTransformer(embedding_model)\n", + " \n", + " # Initialize FAISS index for vector similarity search\n", + " self.index = IndexFlatL2(vector_dim)\n", + " \n", + " # Store document texts and their embeddings\n", + " self.documents = []\n", + " self.document_embeddings = []\n", + " \n", + " # Retrieval parameters\n", + " self.top_k_retrieval = top_k_retrieval\n", + " \n", + " # Conversation history and context\n", + " self.conversation_history = \"\"\n", + " self.model_kwargs = model_kwargs\n", + "\n", + " def add_documents(self, documents: List[str]):\n", + " \"\"\"\n", + " Add documents to the RAG pipeline's knowledge base\n", + " \n", + " Args:\n", + " documents (List[str]): List of document texts to add\n", + " \"\"\"\n", + " for doc in documents:\n", + " # Embed document\n", + " embedding = self.embedding_model.encode(doc)\n", + " \n", + " # Add to index and document store\n", + " self.index.add(np.array([embedding]))\n", + " self.documents.append(doc)\n", + " self.document_embeddings.append(embedding)\n", + "\n", + " def retrieve_relevant_docs(self, query: str) -> List[str]:\n", + " \"\"\"\n", + " Retrieve most relevant documents for a given query\n", + " \n", + " Args:\n", + " query (str): Input query to find relevant documents\n", + " \n", + " Returns:\n", + " List[str]: Top k most relevant documents\n", + " \"\"\"\n", + " # Embed query\n", + " query_embedding = self.embedding_model.encode(query)\n", + " \n", + " # Perform similarity search\n", + " distances, indices = self.index.search(\n", + " np.array([query_embedding]), \n", + " self.top_k_retrieval\n", + " )\n", + " \n", + " # Retrieve and return top documents\n", + " return [self.documents[i] for i in indices[0]]\n", + "\n", + " def generate_response(self, query: str) -> str:\n", + " \"\"\"\n", + " Generate a response using retrieval-augmented generation\n", + " \n", + " Args:\n", + " query (str): User's input query\n", + " \n", + " Returns:\n", + " str: Generated response incorporating retrieved context\n", + " \"\"\"\n", + " # Retrieve relevant documents\n", + " retrieved_docs = self.retrieve_relevant_docs(query)\n", + " \n", + " # Construct context-aware prompt\n", + " context = \"\\n\\n\".join([f\"Context Document: {doc}\" for doc in retrieved_docs])\n", + " full_prompt = f\"\"\"\n", + " Context:\n", + " {context}\n", + " \n", + " Query: {query}\n", + " \n", + " Generate a comprehensive and informative response that:\n", + " 1. Uses the provided context documents\n", + " 2. 
Directly answers the query\n", + " 3. Incorporates relevant information from the context\n", + " \"\"\"\n", + " \n", + " # Prepare API arguments\n", + " api_kwargs = self.model_client.convert_inputs_to_api_kwargs(\n", + " input=full_prompt,\n", + " model_kwargs=self.model_kwargs,\n", + " model_type=ModelType.LLM\n", + " )\n", + " \n", + " # Call API and parse response\n", + " response = self.model_client.call(\n", + " api_kwargs=api_kwargs, \n", + " model_type=ModelType.LLM\n", + " )\n", + " response_text = self.model_client.parse_chat_completion(response)\n", + " \n", + " # Update conversation history\n", + " self.conversation_history += f\"\\nQuery: {query}\\nResponse: {response_text}\"\n", + " \n", + " return response_text\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `run_rag_pipeline` function demonstrates how to use the AdalflowRAGPipeline for embedding documents, retrieving relevant context, and generating responses:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "def run_rag_pipeline(model_client, model_kwargs, documents, queries):\n", + " rag_pipeline = AdalflowRAGPipeline(model_client=model_client, model_kwargs=model_kwargs)\n", + "\n", + " rag_pipeline.add_documents(documents)\n", + "\n", + " # Generate responses\n", + " for query in queries:\n", + " print(f\"\\nQuery: {query}\")\n", + " response = rag_pipeline.generate_response(query)\n", + " print(f\"Response: {response}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Query: Does Ajith Kumar has any nick name ?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=78, prompt_tokens=122, total_tokens=200), raw_response='Based on the provided context documents, Ajith Kumar, also known as Ajithvcoder, has a nickname that he has given himself. 
According to the context, Ajithvcoder is his nickname that he has chosen for himself.\\n\\nTherefore, the answer to the query is:\\n\\nYes, Ajith Kumar has a nickname that he has given himself, which is Ajithvcoder.', metadata=None)\n", + "\n", + "Query: What is the ajithvcoder's favourite food?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=67, prompt_tokens=109, total_tokens=176), raw_response='Based on the provided context document, I can confidently answer the query as follows:\\n\\nAjithvcoder\\'s favourite food is Hyderabadi Panner Dum Briyani.\\n\\nThis answer is directly supported by the context document, which states: \"ajithvcoder likes Hyderabadi panner dum briyani much.\"', metadata=None)\n", + "\n", + "Query: When did ajithvcoder graduated ?\n", + "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=57, prompt_tokens=107, total_tokens=164), raw_response=\"Based on the provided context documents, we can determine that Ajith V.Coder graduated on May 2016.\\n\\nHere's a comprehensive and informative response that directly answers the query:\\n\\nAjith V.Coder graduated on May 2016, which is mentioned in the context document.\", metadata=None)\n" + ] + } + ], + "source": [ + "# setup_env()\n", + "\n", + "# ajithvcoder's statements are added so that we can validate that the LLM is generating from these lines only\n", + "documents = [\n", + " \"ajithvcoder is a good person whom the world knows as Ajith Kumar, ajithvcoder is his nick name that AjithKumar gave himself\",\n", + " \"The Eiffel Tower is a famous landmark in Paris, built in 1889 for the World's Fair.\",\n", + " \"ajithvcoder likes Hyderabadi panner dum briyani much.\",\n", + " \"The Louvre Museum in Paris is the world's largest art museum, housing thousands of works of art.\",\n", + " \"ajithvcoder has a engineering degree and he graduated on May, 2016.\"\n", + "]\n", + "\n", + "# Questions related to ajithvcoder's are added so that we can validate\n", + "# that the LLM is generating from above given lines only\n", + "queries = [\n", + " \"Does Ajith Kumar has any nick name ?\",\n", + " \"What is the ajithvcoder's favourite food?\",\n", + " \"When did ajithvcoder graduated ?\"\n", + "]\n", + "\n", + "groq_model_kwargs = {\n", + " \"model\": \"llama-3.2-1b-preview\", # Use 16k model for larger context\n", + " \"temperature\": 0.1,\n", + " \"max_tokens\": 800,\n", + "}\n", + "\n", + "openai_model_kwargs = {\n", + " \"model\": \"gpt-3.5-turbo\", # Use 16k model for larger context\n", + " \"temperature\": 0.1,\n", + " \"max_tokens\": 800,\n", + "}\n", + "\n", + "# Below example shows that adalflow can be used in a genric manner for any api provider\n", + "# without worrying about prompt and parsing results\n", + "model_client = GroqAPIClient()\n", + "run_rag_pipeline(model_client, groq_model_kwargs, documents, queries)\n", + "run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/tutorials/adalflow_rag_documents.py b/tutorials/adalflow_rag_documents.py new file mode 100644 
index 00000000..e1807b2d --- /dev/null +++ b/tutorials/adalflow_rag_documents.py @@ -0,0 +1,248 @@ +import os +import tiktoken +from typing import List +import numpy as np +from sentence_transformers import SentenceTransformer +from faiss import IndexFlatL2 + +from adalflow.components.model_client import GroqAPIClient, OpenAIClient +from adalflow.core.types import ModelType +from adalflow.utils import setup_env + +""" +pip install torch --index-url https://download.pytorch.org/whl/cpu +pip install sentence-transformers==3.3.1 +pip install faiss-cpu==1.9.0.post1 +""" + + +class AdalflowRAGPipeline: + def __init__( + self, + model_client=None, + model_kwargs=None, + embedding_model="all-MiniLM-L6-v2", + vector_dim=384, + top_k_retrieval=3, + max_context_tokens=800, + ): + """ + Initialize RAG Pipeline for handling large text files + + Args: + embedding_model (str): Sentence transformer model for embeddings + vector_dim (int): Dimension of embedding vectors + top_k_retrieval (int): Number of documents to retrieve + max_context_tokens (int): Maximum tokens to send to LLM + """ + # Initialize model client for generation + self.model_client = model_client + + # Initialize tokenizer for precise token counting + self.tokenizer = tiktoken.get_encoding("cl100k_base") + + # Initialize embedding model + self.embedding_model = SentenceTransformer(embedding_model) + + # Initialize FAISS index for vector similarity search + self.index = IndexFlatL2(vector_dim) + + # Store document texts, embeddings, and metadata + self.documents = [] + self.document_embeddings = [] + self.document_metadata = [] + + # Retrieval and context management parameters + self.top_k_retrieval = top_k_retrieval + self.max_context_tokens = max_context_tokens + + # Model generation parameters + self.model_kwargs = model_kwargs + + def load_text_file(self, file_path: str) -> List[str]: + """ + Load a large text file and split into manageable chunks + + Args: + file_path (str): Path to the text file + + Returns: + List[str]: List of document chunks + """ + with open(file_path, "r", encoding="utf-8") as file: + # Read entire file + content = file.read() + + # Split content into chunks (e.g., 10 lines per chunk) + lines = content.split("\n") + chunks = [] + chunk_size = 10 # Adjust based on your file structure + + for i in range(0, len(lines), chunk_size): + chunk = "\n".join(lines[i : i + chunk_size]) + chunks.append(chunk) + + return chunks + + def add_documents_from_directory(self, directory_path: str): + """ + Add documents from all text files in a directory + + Args: + directory_path (str): Path to directory containing text files + """ + for filename in os.listdir(directory_path): + if filename.endswith(".txt"): + file_path = os.path.join(directory_path, filename) + document_chunks = self.load_text_file(file_path) + + for chunk in document_chunks: + # Embed document chunk + embedding = self.embedding_model.encode(chunk) + + # Add to index and document store + self.index.add(np.array([embedding])) + self.documents.append(chunk) + self.document_embeddings.append(embedding) + self.document_metadata.append( + { + "filename": filename, + "chunk_index": len(self.document_metadata), + } + ) + + def count_tokens(self, text: str) -> int: + """ + Count tokens in a given text + + Args: + text (str): Input text + + Returns: + int: Number of tokens + """ + return len(self.tokenizer.encode(text)) + + def retrieve_and_truncate_context(self, query: str) -> str: + """ + Retrieve relevant documents and truncate to fit token limit + + Args: + query 
(str): Input query + + Returns: + str: Concatenated context within token limit + """ + # Retrieve relevant documents + query_embedding = self.embedding_model.encode(query) + distances, indices = self.index.search( + np.array([query_embedding]), self.top_k_retrieval + ) + + # Collect and truncate context + context = [] + current_tokens = 0 + + for idx in indices[0]: + doc = self.documents[idx] + doc_tokens = self.count_tokens(doc) + + # Check if adding this document would exceed token limit + if current_tokens + doc_tokens <= self.max_context_tokens: + context.append(doc) + current_tokens += doc_tokens + else: + break + + return "\n\n".join(context) + + def generate_response(self, query: str) -> str: + """ + Generate a response using retrieval-augmented generation + + Args: + query (str): User's input query + + Returns: + str: Generated response incorporating retrieved context + """ + # Retrieve and truncate context + retrieved_context = self.retrieve_and_truncate_context(query) + + # Construct context-aware prompt + full_prompt = f""" + Context Documents: + {retrieved_context} + + Query: {query} + + Generate a comprehensive response that: + 1. Directly answers the query + 2. Incorporates relevant information from the context documents + 3. Provides clear and concise information + """ + + # Prepare API arguments + api_kwargs = self.model_client.convert_inputs_to_api_kwargs( + input=full_prompt, model_kwargs=self.model_kwargs, model_type=ModelType.LLM + ) + + # Call API and parse response + response = self.model_client.call( + api_kwargs=api_kwargs, model_type=ModelType.LLM + ) + response_text = self.model_client.parse_chat_completion(response) + + return response_text + + +def run_rag_pipeline(model_client, model_kwargs, documents, queries): + + # Example usage of RAG pipeline + rag_pipeline = AdalflowRAGPipeline( + model_client=model_client, + model_kwargs=model_kwargs, + top_k_retrieval=2, # Retrieve top 3 most relevant chunks + max_context_tokens=800, # Limit context to 1500 tokens + ) + + # Add documents from a directory of text files + rag_pipeline.add_documents_from_directory(documents) + + # Generate responses + for query in queries: + print(f"\nQuery: {query}") + response = rag_pipeline.generate_response(query) + print(f"Response: {response}") + + +def main(): + setup_env() + + documents = "./tutorials/assets/documents" + + queries = [ + "What year was the Crystal Cavern discovered?", + "What is the name of the rare tree in Elmsworth?", + "What local legend claim that Lunaflits surrounds?", + ] + + groq_model_kwargs = { + "model": "llama-3.2-1b-preview", # Use 16k model for larger context + "temperature": 0.1, + "max_tokens": 800, + } + + openai_model_kwargs = { + "model": "gpt-3.5-turbo", + "temperature": 0.1, + "max_tokens": 800, + } + # Below example shows that adalflow can be used in a genric manner for any api provider + # without worrying about prompt and parsing results + run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries) + run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries) + + +if __name__ == "__main__": + main() diff --git a/tutorials/adalflow_rag_vanilla.py b/tutorials/adalflow_rag_vanilla.py new file mode 100644 index 00000000..36af7997 --- /dev/null +++ b/tutorials/adalflow_rag_vanilla.py @@ -0,0 +1,188 @@ +from typing import List +import numpy as np +from sentence_transformers import SentenceTransformer +from faiss import IndexFlatL2 + +from adalflow.components.model_client import GroqAPIClient, OpenAIClient +from 
adalflow.core.types import ModelType +from adalflow.utils import setup_env + +""" +pip install torch --index-url https://download.pytorch.org/whl/cpu +pip install sentence-transformers==3.3.1 +pip install faiss-cpu==1.9.0.post1 +""" + + +class AdalflowRAGPipeline: + def __init__( + self, + model_client=None, + model_kwargs=None, + embedding_model="all-MiniLM-L6-v2", + vector_dim=384, + top_k_retrieval=1, + ): + """ + Initialize RAG Pipeline with embedding and retrieval components + + Args: + embedding_model (str): Sentence transformer model for embeddings + vector_dim (int): Dimension of embedding vectors + top_k_retrieval (int): Number of documents to retrieve + """ + # Initialize model client for generation + self.model_client = model_client + + # Initialize embedding model + self.embedding_model = SentenceTransformer(embedding_model) + + # Initialize FAISS index for vector similarity search + self.index = IndexFlatL2(vector_dim) + + # Store document texts and their embeddings + self.documents = [] + self.document_embeddings = [] + + # Retrieval parameters + self.top_k_retrieval = top_k_retrieval + + # Conversation history and context + self.conversation_history = "" + self.model_kwargs = model_kwargs + + def add_documents(self, documents: List[str]): + """ + Add documents to the RAG pipeline's knowledge base + + Args: + documents (List[str]): List of document texts to add + """ + for doc in documents: + # Embed document + embedding = self.embedding_model.encode(doc) + + # Add to index and document store + self.index.add(np.array([embedding])) + self.documents.append(doc) + self.document_embeddings.append(embedding) + + def retrieve_relevant_docs(self, query: str) -> List[str]: + """ + Retrieve most relevant documents for a given query + + Args: + query (str): Input query to find relevant documents + + Returns: + List[str]: Top k most relevant documents + """ + # Embed query + query_embedding = self.embedding_model.encode(query) + + # Perform similarity search + distances, indices = self.index.search( + np.array([query_embedding]), self.top_k_retrieval + ) + + # Retrieve and return top documents + return [self.documents[i] for i in indices[0]] + + def generate_response(self, query: str) -> str: + """ + Generate a response using retrieval-augmented generation + + Args: + query (str): User's input query + + Returns: + str: Generated response incorporating retrieved context + """ + # Retrieve relevant documents + retrieved_docs = self.retrieve_relevant_docs(query) + + # Construct context-aware prompt + context = "\n\n".join([f"Context Document: {doc}" for doc in retrieved_docs]) + full_prompt = f""" + Context: + {context} + + Query: {query} + + Generate a comprehensive and informative response that: + 1. Uses the provided context documents + 2. Directly answers the query + 3. 
Incorporates relevant information from the context + """ + + # Prepare API arguments + api_kwargs = self.model_client.convert_inputs_to_api_kwargs( + input=full_prompt, model_kwargs=self.model_kwargs, model_type=ModelType.LLM + ) + + # Call API and parse response + response = self.model_client.call( + api_kwargs=api_kwargs, model_type=ModelType.LLM + ) + response_text = self.model_client.parse_chat_completion(response) + + # Update conversation history + self.conversation_history += f"\nQuery: {query}\nResponse: {response_text}" + + return response_text + + +def run_rag_pipeline(model_client, model_kwargs, documents, queries): + rag_pipeline = AdalflowRAGPipeline( + model_client=model_client, model_kwargs=model_kwargs + ) + + rag_pipeline.add_documents(documents) + + # Generate responses + for query in queries: + print(f"\nQuery: {query}") + response = rag_pipeline.generate_response(query) + print(f"Response: {response}") + + +def main(): + setup_env() + + # ajithvcoder's statements are added so that we can validate that the LLM is generating from these lines only + documents = [ + "ajithvcoder is a good person whom the world knows as Ajith Kumar, ajithvcoder is his nick name that AjithKumar gave himself", + "The Eiffel Tower is a famous landmark in Paris, built in 1889 for the World's Fair.", + "ajithvcoder likes Hyderabadi panner dum briyani much.", + "The Louvre Museum in Paris is the world's largest art museum, housing thousands of works of art.", + "ajithvcoder has a engineering degree and he graduated on May, 2016.", + ] + + # Questions related to ajithvcoder's are added so that we can validate + # that the LLM is generating from above given lines only + queries = [ + "Does Ajith Kumar has any nick name ?", + "What is the ajithvcoder's favourite food?", + "When did ajithvcoder graduated ?", + ] + + groq_model_kwargs = { + "model": "llama-3.2-1b-preview", # Use 16k model for larger context + "temperature": 0.1, + "max_tokens": 800, + } + + openai_model_kwargs = { + "model": "gpt-3.5-turbo", # Use 16k model for larger context + "temperature": 0.1, + "max_tokens": 800, + } + + # Below example shows that adalflow can be used in a genric manner for any api provider + # without worrying about prompt and parsing results + run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries) + run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries) + + +if __name__ == "__main__": + main() diff --git a/tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt b/tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt new file mode 100644 index 00000000..324f4b41 --- /dev/null +++ b/tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt @@ -0,0 +1,6 @@ +In a remote desert canyon, scientists discovered a colony of glowing insects called "Lunaflits." These +creatures produce bioluminescence to attract mates and ward off predators. Unlike fireflies, Lunaflits +emit a constant, soothing green light that illuminates the canyon at night. The canyon walls are covered +with a rare moss that reflects their light, creating an ethereal glow. Researchers have found that Lunaflits +communicate through rhythmic light pulses, forming intricate patterns. Local legends claim these insects +are guardians of ancient treasure buried deep within the canyon. 
diff --git a/tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt b/tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt new file mode 100644 index 00000000..0b4929ab --- /dev/null +++ b/tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt @@ -0,0 +1,6 @@ +Hidden beneath the icy waters of Lake Aurora lies the Crystal Cavern, a natural wonder discovered by divers +in 1987. The cavern is adorned with shimmering quartz formations that refract sunlight into a spectrum of +colors. It is said that the cavern once served as a sanctuary for an ancient civilization that revered the +crystals as conduits to the spirit world. Explorers have recovered artifacts carved with intricate symbols, +suggesting a deep connection to celestial events. However, accessing the cavern is perilous due to the lake's +freezing temperatures and strong currents. diff --git a/tutorials/assets/documents/The Legend of the Moonshade Willow.txt b/tutorials/assets/documents/The Legend of the Moonshade Willow.txt new file mode 100644 index 00000000..36e342b3 --- /dev/null +++ b/tutorials/assets/documents/The Legend of the Moonshade Willow.txt @@ -0,0 +1,6 @@ +In the mystical village of Elmsworth, a rare tree known as the "Moonshade Willow" blooms once every seven +years. Its blossoms emit a soft glow, and villagers believe that meditating under its branches brings vivid +dreams of the future. The tree's bark is said to contain a secret resin used in ancient healing rituals. Elders +claim the Moonshade Willow was a gift from a goddess to protect the village. Despite its sacred status, +researchers have discovered that the tree thrives only in Elmsworth's unique soil, making it impossible to +cultivate elsewhere. From e7a714c6f7007d92396ee8cc7fd290e7fe23189b Mon Sep 17 00:00:00 2001 From: Jeff Zhang Date: Fri, 13 Dec 2024 09:53:32 +0800 Subject: [PATCH 36/40] [MINOR] Rename lightrag to adalflow in doc --- docs/source/get_started/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/get_started/installation.rst b/docs/source/get_started/installation.rst index cfdae169..32dae57f 100644 --- a/docs/source/get_started/installation.rst +++ b/docs/source/get_started/installation.rst @@ -85,7 +85,7 @@ Here is the list of our tested versions: google-generativeai = "^0.7.2" cohere = "^5.5.8" -You can install the optional packages with either ``pip install package_name`` or ``pip install lightrag[package_name]``. +You can install the optional packages with either ``pip install package_name`` or ``pip install adalflow[package_name]``. 
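To make the optional-package note above concrete, here is a minimal smoke test for an extras install. It is a sketch, not part of the patch: it reuses only calls that already appear in the tutorials earlier in this series (`setup_env`, `convert_inputs_to_api_kwargs`, `call`, `parse_chat_completion`); the `.env` file location, the `gpt-3.5-turbo` model choice, and the `adalflow[openai,groq]` extras group are illustrative assumptions rather than requirements.

```python
# Assumes: pip install adalflow[openai,groq]
# and a local .env file containing OPENAI_API_KEY / GROQ_API_KEY, as in the tutorials above.
from adalflow.components.model_client import GroqAPIClient, OpenAIClient  # noqa: F401
from adalflow.core.types import ModelType
from adalflow.utils import setup_env

setup_env(".env")  # load API keys from the local .env file

# Pick whichever provider's extra you installed; GroqAPIClient() is used the same way.
model_client = OpenAIClient()
model_kwargs = {"model": "gpt-3.5-turbo", "temperature": 0.1, "max_tokens": 50}

# Same three-step pattern used throughout the RAG tutorials:
# convert inputs -> call the API -> parse the completion.
api_kwargs = model_client.convert_inputs_to_api_kwargs(
    input="Reply with the single word: ok",
    model_kwargs=model_kwargs,
    model_type=ModelType.LLM,
)
response = model_client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM)
print(model_client.parse_chat_completion(response))
```

If this prints a `GeneratorOutput` with a non-empty `raw_response`, the optional packages and API keys are wired up correctly.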
From 14f85cbf7b47d3447469f67ce292148d3eceb9a8 Mon Sep 17 00:00:00 2001 From: Jeff Zhang Date: Fri, 13 Dec 2024 17:01:45 +0800 Subject: [PATCH 37/40] [MINOR] Rename from lightrag to adalflow --- .../components/output_parsers/outputs.py | 2 +- adalflow/adalflow/core/__init__.py | 4 ++-- .../adalflow/core/default_prompt_template.py | 20 +++++++++---------- adalflow/adalflow/core/generator.py | 4 ++-- adalflow/adalflow/core/model_client.py | 6 +++--- adalflow/adalflow/core/prompt_builder.py | 4 ++-- docs/source/tutorials/generator.rst | 4 ++-- docs/source/tutorials/prompt.rst | 8 ++++---- 8 files changed, 26 insertions(+), 26 deletions(-) diff --git a/adalflow/adalflow/components/output_parsers/outputs.py b/adalflow/adalflow/components/output_parsers/outputs.py index 1f4ff652..288cba67 100644 --- a/adalflow/adalflow/components/output_parsers/outputs.py +++ b/adalflow/adalflow/components/output_parsers/outputs.py @@ -75,7 +75,7 @@ class OutputParser(Component): This interface helps users customize output parsers with consistent interfaces for the Generator. Even though you don't always need to subclass it. - LightRAG uses two core components: + AdalFlow uses two core components: 1. the Prompt to format output instruction 2. A string parser component from core.string_parser for response parsing. """ diff --git a/adalflow/adalflow/core/__init__.py b/adalflow/adalflow/core/__init__.py index 852bc6f6..38472520 100644 --- a/adalflow/adalflow/core/__init__.py +++ b/adalflow/adalflow/core/__init__.py @@ -3,7 +3,7 @@ from .component import Component, FunComponent, fun_to_component from .container import Sequential from .db import LocalDB -from .default_prompt_template import DEFAULT_LIGHTRAG_SYSTEM_PROMPT +from .default_prompt_template import DEFAULT_ADALFLOW_SYSTEM_PROMPT from .embedder import Embedder, BatchEmbedder from .generator import Generator, BackwardEngine from .model_client import ModelClient @@ -58,7 +58,7 @@ "Generator", "BackwardEngine", "Prompt", - "DEFAULT_LIGHTRAG_SYSTEM_PROMPT", + "DEFAULT_ADALFLOW_SYSTEM_PROMPT", # "Parameter", "required_field", "ModelClient", diff --git a/adalflow/adalflow/core/default_prompt_template.py b/adalflow/adalflow/core/default_prompt_template.py index 6837e22d..6bdd568e 100644 --- a/adalflow/adalflow/core/default_prompt_template.py +++ b/adalflow/adalflow/core/default_prompt_template.py @@ -1,16 +1,16 @@ -"""This is the default system prompt template used in the LightRAG. +"""This is the default system prompt template used in the AdalFlow. Use :ref:`Prompt ` class to manage it. 
""" __all__ = [ - "LIGHTRAG_DEFAULT_PROMPT_ARGS", - "LIGHTRAG_DEFAULT_PROMPT_TRAINABLE_PARAMS", - "SIMPLE_DEFAULT_LIGHTRAG_SYSTEM_PROMPT", - "DEFAULT_LIGHTRAG_SYSTEM_PROMPT", + "ADALFLOW_DEFAULT_PROMPT_ARGS", + "ADALFLOW_DEFAULT_PROMPT_TRAINABLE_PARAMS", + "SIMPLE_DEFAULT_ADALFLOW_SYSTEM_PROMPT", + "DEFAULT_ADALFLOW_SYSTEM_PROMPT", ] # TODO: potentially make a data class for this -LIGHTRAG_DEFAULT_PROMPT_ARGS = [ +ADALFLOW_DEFAULT_PROMPT_ARGS = [ "task_desc_str", # task description "output_format_str", # output format of the task "tools_str", # tools used in the task @@ -21,17 +21,17 @@ "input_str", # user query or input ] -LIGHTRAG_DEFAULT_PROMPT_TRAINABLE_PARAMS = [ +ADALFLOW_DEFAULT_PROMPT_TRAINABLE_PARAMS = [ "task_desc_str", # "output_format_str", "examples_str", ] -SIMPLE_DEFAULT_LIGHTRAG_SYSTEM_PROMPT = r"""{{task_desc_str}} +SIMPLE_DEFAULT_ADALFLOW_SYSTEM_PROMPT = r"""{{task_desc_str}} User: {{input_str}} You:""" -DEFAULT_LIGHTRAG_SYSTEM_PROMPT = r""" +DEFAULT_ADALFLOW_SYSTEM_PROMPT = r""" {# task desc #} {% if task_desc_str %} {{task_desc_str}} @@ -87,7 +87,7 @@ {% endif %} """ -"""This is the default system prompt template used in the LightRAG. +"""This is the default system prompt template used in the AdalFlow. Use :ref:`Prompt ` class to manage it. """ diff --git a/adalflow/adalflow/core/generator.py b/adalflow/adalflow/core/generator.py index 309f954f..dd0ff5f6 100644 --- a/adalflow/adalflow/core/generator.py +++ b/adalflow/adalflow/core/generator.py @@ -26,7 +26,7 @@ from adalflow.core.prompt_builder import Prompt from adalflow.core.functional import compose_model_kwargs from adalflow.core.model_client import ModelClient -from adalflow.core.default_prompt_template import DEFAULT_LIGHTRAG_SYSTEM_PROMPT +from adalflow.core.default_prompt_template import DEFAULT_ADALFLOW_SYSTEM_PROMPT from adalflow.optim.function import BackwardContext from adalflow.utils.cache import CachedEngine from adalflow.tracing.callback_manager import CallbackManager @@ -113,7 +113,7 @@ def __init__( Got {model_client} instead." ) - template = template or DEFAULT_LIGHTRAG_SYSTEM_PROMPT + template = template or DEFAULT_ADALFLOW_SYSTEM_PROMPT # create the cache path and initialize the cache engine diff --git a/adalflow/adalflow/core/model_client.py b/adalflow/adalflow/core/model_client.py index fb825f02..31399eb6 100644 --- a/adalflow/adalflow/core/model_client.py +++ b/adalflow/adalflow/core/model_client.py @@ -25,7 +25,7 @@ class ModelClient(Component): (1) Initialize the client, including both sync and async. -(2) Convert the standard LightRAG components inputs to the API-specific format. +(2) Convert the standard AdalFlow components inputs to the API-specific format. (3) Call the API and parse the response. @@ -33,7 +33,7 @@ class ModelClient(Component): Check the subclasses in `components/model_client/` directory for the functional API clients we have. - This interface is designed to bridge the gap between LightRAG components inputs and model APIs. + This interface is designed to bridge the gap between AdalFlow components inputs and model APIs. You can see examples of the subclasses in components/model_client/ directory. 
""" @@ -103,7 +103,7 @@ def track_completion_usage(self, *args, **kwargs) -> "CompletionUsage": ) def parse_embedding_response(self, response: Any) -> "EmbedderOutput": - r"""Parse the embedding response to a structure LightRAG components can understand.""" + r"""Parse the embedding response to a structure AdalFlow components can understand.""" raise NotImplementedError( f"{type(self).__name__} must implement parse_embedding_response method" ) diff --git a/adalflow/adalflow/core/prompt_builder.py b/adalflow/adalflow/core/prompt_builder.py index 03a61959..eca45557 100644 --- a/adalflow/adalflow/core/prompt_builder.py +++ b/adalflow/adalflow/core/prompt_builder.py @@ -8,7 +8,7 @@ from adalflow.core.component import Component -from adalflow.core.default_prompt_template import DEFAULT_LIGHTRAG_SYSTEM_PROMPT +from adalflow.core.default_prompt_template import DEFAULT_ADALFLOW_SYSTEM_PROMPT from adalflow.optim.parameter import Parameter @@ -56,7 +56,7 @@ def __init__( ): super().__init__() - self.template = template or DEFAULT_LIGHTRAG_SYSTEM_PROMPT + self.template = template or DEFAULT_ADALFLOW_SYSTEM_PROMPT self.__create_jinja2_template() self.prompt_variables: List[str] = [] for var in self._find_template_variables(self.template): diff --git a/docs/source/tutorials/generator.rst b/docs/source/tutorials/generator.rst index 53c8e7fd..a170a8de 100644 --- a/docs/source/tutorials/generator.rst +++ b/docs/source/tutorials/generator.rst @@ -55,7 +55,7 @@ An Orchestrator It orchestrates three components: - `Prompt`: by taking in ``template`` (string) and ``prompt_kwargs`` (dict) to format the prompt at initialization. - When the ``template`` is not provided, it defaults to :const:`DEFAULT_LIGHTRAG_SYSTEM_PROMPT`. + When the ``template`` is not provided, it defaults to :const:`DEFAULT_ADALFLOW_SYSTEM_PROMPT`. - `ModelClient`: by taking in an already instantiated ``model_client`` and ``model_kwargs`` to call the model. Switching out the model client allows you to call any LLM model using the same prompt and output parsing. @@ -485,7 +485,7 @@ It will require users to define ``Parameter`` and pass it to the ``prompt_kwargs - :class:`core.generator.Generator` - :class:`core.types.GeneratorOutput` - - :class:`core.default_prompt_template.DEFAULT_LIGHTRAG_SYSTEM_PROMPT` + - :class:`core.default_prompt_template.DEFAULT_ADALFLOW_SYSTEM_PROMPT` - :class:`core.types.ModelClientType` - :class:`core.types.ModelType` - :class:`core.string_parser.JsonParser` diff --git a/docs/source/tutorials/prompt.rst b/docs/source/tutorials/prompt.rst index 073ba087..6c393b07 100644 --- a/docs/source/tutorials/prompt.rst +++ b/docs/source/tutorials/prompt.rst @@ -6,7 +6,7 @@ Try Quickstart in Colab - + GitHub Open Source Code @@ -206,13 +206,13 @@ As with all components, you can use ``to_dict`` and ``from_dict`` to serialize a Default Prompt Template ------------------------- -In default, the ``Prompt`` class uses the :const:`DEFAULT_LIGHTRAG_SYSTEM_PROMPT` as its string template if no template is provided. +In default, the ``Prompt`` class uses the :const:`DEFAULT_ADALFLOW_SYSTEM_PROMPT` as its string template if no template is provided. This default template allows you to conditionally passing seven important variables designed from the data flow diagram above. These varaibles are: .. 
code-block:: python - LIGHTRAG_DEFAULT_PROMPT_ARGS = [ + ADALFLOW_DEFAULT_PROMPT_ARGS = [ "task_desc_str", # task description "output_format_str", # output format of the task "tools_str", # tools used in the task @@ -266,4 +266,4 @@ The output will be the bare minimum with only the user query and a prefix for as :class: highlight - :class:`core.prompt_builder.Prompt` - - :const:`core.default_prompt_template.DEFAULT_LIGHTRAG_SYSTEM_PROMPT` + - :const:`core.default_prompt_template.DEFAULT_ADALFLOW_SYSTEM_PROMPT` From f2c82fadc22ee475fbb7385bc5e2e6b4d68941e9 Mon Sep 17 00:00:00 2001 From: Ajith Kumar V Date: Fri, 13 Dec 2024 10:04:48 +0000 Subject: [PATCH 38/40] feat: add rst files for rag_vanilla, rag_documents --- docs/source/use_cases/rag_documents.rst | 322 ++++++++++++++++++++++++ docs/source/use_cases/rag_vanilla.rst | 263 +++++++++++++++++++ 2 files changed, 585 insertions(+) create mode 100644 docs/source/use_cases/rag_documents.rst create mode 100644 docs/source/use_cases/rag_vanilla.rst diff --git a/docs/source/use_cases/rag_documents.rst b/docs/source/use_cases/rag_documents.rst new file mode 100644 index 00000000..26c8a430 --- /dev/null +++ b/docs/source/use_cases/rag_documents.rst @@ -0,0 +1,322 @@ +.. raw:: html + + + +RAG for documents +============================= + +Overview +-------- + +This implementation showcases an end-to-end RAG system capable of handling large-scale text files and +generating context-aware responses. It is both modular and extensible, making it adaptable to various +use cases and LLM APIs. + +**Imports** + +- **SentenceTransformer**: Used for creating dense vector embeddings for textual data. +- **FAISS**: Provides efficient similarity search using vector indexing. +- **tiktoken**: ensures that the text preprocessing aligns with the tokenization requirements of the underlying language models, making the pipeline robust and efficient. +- **GroqAPIClient and OpenAIClient**: Custom classes for interacting with different LLM providers. +- **ModelType**: Enum for specifying the model type. + +.. code-block:: python + + import os + import tiktoken + from typing import List, Dict, Tuple + import numpy as np + from sentence_transformers import SentenceTransformer + from faiss import IndexFlatL2 + + from adalflow.components.model_client import GroqAPIClient, OpenAIClient + from adalflow.core.types import ModelType + from adalflow.utils import setup_env + +The ``AdalflowRAGPipeline`` class sets up the Retrieval-Augmented Generation (RAG) pipeline. Its ``__init__`` method initializes key components: + +- An embedding model (``all-MiniLM-L6-v2``) is loaded using ``SentenceTransformer`` to convert text into dense vector embeddings with a dimensionality of 384. +- A FAISS index (``IndexFlatL2``) is created for similarity-based document retrieval. +- Parameters such as ``top_k_retrieval`` (number of documents to retrieve) and ``max_context_tokens`` (limit on token count in the context) are configured. +- A tokenizer (``tiktoken``) ensures precise token counting, crucial for handling large language models (LLMs). + +The method also initializes storage for documents, their embeddings, and associated metadata for efficient management and retrieval. + +The ``AdalflowRAGPipeline`` class provides a flexible pipeline for Retrieval-Augmented Generation (RAG), +initializing with parameters such as the embedding model (``all-MiniLM-L6-v2`` by default), vector dimension, +top-k retrieval count, and token limits for context. 
It utilizes a tokenizer for token counting, a +SentenceTransformer for embeddings, and a FAISS index for similarity searches, while also maintaining +document data and metadata. The ``load_text_file`` method processes large text files into manageable chunks +by splitting the content into fixed line groups, facilitating easier embedding and storage. To handle +multiple files, ``add_documents_from_directory`` iterates over text files in a directory, embeds the content, +and stores them in the FAISS index along with metadata. Token counting is achieved via the ``count_tokens`` +method, leveraging a tokenizer to precisely determine the number of tokens in a given text. The +``retrieve_and_truncate_context`` method fetches the most relevant documents from the FAISS index based on +query embeddings, truncating the context to adhere to token limits. Finally, the ``generate_response`` method +constructs a comprehensive prompt by combining the retrieved context and query, invokes the provided model +client for a response, and parses the results into a readable format. This pipeline demonstrates seamless +integration of text retrieval and generation to handle large-scale document queries effectively. + + +.. code-block:: python + + class AdalflowRAGPipeline: + def __init__(self, + model_client=None, + model_kwargs=None, + embedding_model='all-MiniLM-L6-v2', + vector_dim=384, + top_k_retrieval=3, + max_context_tokens=800): + """ + Initialize RAG Pipeline for handling large text files + + Args: + embedding_model (str): Sentence transformer model for embeddings + vector_dim (int): Dimension of embedding vectors + top_k_retrieval (int): Number of documents to retrieve + max_context_tokens (int): Maximum tokens to send to LLM + """ + # Initialize model client for generation + self.model_client = model_client + + # Initialize tokenizer for precise token counting + self.tokenizer = tiktoken.get_encoding("cl100k_base") + + # Initialize embedding model + self.embedding_model = SentenceTransformer(embedding_model) + + # Initialize FAISS index for vector similarity search + self.index = IndexFlatL2(vector_dim) + + # Store document texts, embeddings, and metadata + self.documents = [] + self.document_embeddings = [] + self.document_metadata = [] + + # Retrieval and context management parameters + self.top_k_retrieval = top_k_retrieval + self.max_context_tokens = max_context_tokens + + # Model generation parameters + self.model_kwargs = model_kwargs + + def load_text_file(self, file_path: str) -> List[str]: + """ + Load a large text file and split into manageable chunks + + Args: + file_path (str): Path to the text file + + Returns: + List[str]: List of document chunks + """ + with open(file_path, 'r', encoding='utf-8') as file: + # Read entire file + content = file.read() + + # Split content into chunks (e.g., 10 lines per chunk) + lines = content.split('\n') + chunks = [] + chunk_size = 10 # Adjust based on your file structure + + for i in range(0, len(lines), chunk_size): + chunk = '\n'.join(lines[i:i+chunk_size]) + chunks.append(chunk) + + return chunks + + def add_documents_from_directory(self, directory_path: str): + """ + Add documents from all text files in a directory + + Args: + directory_path (str): Path to directory containing text files + """ + for filename in os.listdir(directory_path): + if filename.endswith('.txt'): + file_path = os.path.join(directory_path, filename) + document_chunks = self.load_text_file(file_path) + + for chunk in document_chunks: + # Embed document chunk + embedding = 
self.embedding_model.encode(chunk) + + # Add to index and document store + self.index.add(np.array([embedding])) + self.documents.append(chunk) + self.document_embeddings.append(embedding) + self.document_metadata.append({ + 'filename': filename, + 'chunk_index': len(self.document_metadata) + }) + + def count_tokens(self, text: str) -> int: + """ + Count tokens in a given text + + Args: + text (str): Input text + + Returns: + int: Number of tokens + """ + return len(self.tokenizer.encode(text)) + + def retrieve_and_truncate_context(self, query: str) -> str: + """ + Retrieve relevant documents and truncate to fit token limit + + Args: + query (str): Input query + + Returns: + str: Concatenated context within token limit + """ + # Retrieve relevant documents + query_embedding = self.embedding_model.encode(query) + distances, indices = self.index.search( + np.array([query_embedding]), + self.top_k_retrieval + ) + + # Collect and truncate context + context = [] + current_tokens = 0 + + for idx in indices[0]: + doc = self.documents[idx] + doc_tokens = self.count_tokens(doc) + + # Check if adding this document would exceed token limit + if current_tokens + doc_tokens <= self.max_context_tokens: + context.append(doc) + current_tokens += doc_tokens + else: + break + + return "\n\n".join(context) + + def generate_response(self, query: str) -> str: + """ + Generate a response using retrieval-augmented generation + + Args: + query (str): User's input query + + Returns: + str: Generated response incorporating retrieved context + """ + # Retrieve and truncate context + retrieved_context = self.retrieve_and_truncate_context(query) + + # Construct context-aware prompt + full_prompt = f""" + Context Documents: + {retrieved_context} + + Query: {query} + + Generate a comprehensive response that: + 1. Directly answers the query + 2. Incorporates relevant information from the context documents + 3. Provides clear and concise information + """ + + # Prepare API arguments + api_kwargs = self.model_client.convert_inputs_to_api_kwargs( + input=full_prompt, + model_kwargs=self.model_kwargs, + model_type=ModelType.LLM + ) + + # Call API and parse response + response = self.model_client.call( + api_kwargs=api_kwargs, + model_type=ModelType.LLM + ) + response_text = self.model_client.parse_chat_completion(response) + + return response_text + +The ``run_rag_pipeline`` function demonstrates how to use the ``AdalflowRAGPipeline``. It initializes the pipeline, +adds documents from a directory, and generates responses for a list of user queries. The function is generic +and can accommodate various LLM API clients, such as GroqAPIClient or OpenAIClient, highlighting the pipeline's +flexibility and modularity. + + +.. code-block:: python + + def run_rag_pipeline(model_client, model_kwargs, documents, queries): + + # Example usage of RAG pipeline + rag_pipeline = AdalflowRAGPipeline( + model_client=model_client, + model_kwargs=model_kwargs, + top_k_retrieval=1, # Retrieve top 1 most relevant chunks + max_context_tokens=800 # Limit context to 1500 tokens + ) + + # Add documents from a directory of text files + rag_pipeline.add_documents_from_directory(documents) + + # Generate responses + for query in queries: + print(f"\nQuery: {query}") + response = rag_pipeline.generate_response(query) + print(f"Response: {response}") + + +This block provides an example of running the pipeline with different models and queries. It specifies: + +- The document directory containing the text files. 
+- Example queries about topics such as the "Crystal Cavern" and "rare trees in Elmsworth." +- Configuration for Groq and OpenAI model parameters, including the model type, temperature, and token limits. + +.. code-block:: python + + documents = '../../tutorials/assets/documents' + + queries = [ + "What year was the Crystal Cavern discovered?", + "What is the name of the rare tree in Elmsworth?", + "What local legend claim that Lunaflits surrounds?" + ] + + groq_model_kwargs = { + "model": "llama-3.2-1b-preview", # Use 16k model for larger context + "temperature": 0.1, + "max_tokens": 800, + } + + openai_model_kwargs = { + "model": "gpt-3.5-turbo", + "temperature": 0.1, + "max_tokens": 800, + } + # Below example shows that adalflow can be used in a genric manner for any api provider + # without worrying about prompt and parsing results + run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries) + run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries) + +The example emphasizes that ``AdalflowRAGPipeline`` can interact seamlessly with multiple API providers, +enabling integration with diverse LLMs without modifying the core logic for prompt construction or +response parsing. + + +.. admonition:: API reference + :class: highlight + + - :class:`utils.setup_env` + - :class:`core.types.ModelType` + - :class:`components.model_client.OpenAIClient` + - :class:`components.model_client.GroqAPIClient` diff --git a/docs/source/use_cases/rag_vanilla.rst b/docs/source/use_cases/rag_vanilla.rst new file mode 100644 index 00000000..1a80bbbf --- /dev/null +++ b/docs/source/use_cases/rag_vanilla.rst @@ -0,0 +1,263 @@ +.. raw:: html + + + +RAG Vanilla +============================= + +Overview +-------- + +The **RAG Vanilla** implementation is a Retrieval-Augmented Generation pipeline that integrates document +retrieval with natural language generation. This approach allows users to retrieve contextually relevant +documents from a knowledge base and generate informative responses. The code leverages components such as +sentence embeddings, FAISS indexing, and a large language model (LLM) API client. + + +**Imports** + +- **SentenceTransformer**: Used for creating dense vector embeddings for textual data. +- **FAISS**: Provides efficient similarity search using vector indexing. +- **GroqAPIClient and OpenAIClient**: Custom classes for interacting with different LLM providers. +- **ModelType**: Enum for specifying the model type. + +.. code-block:: python + + import os + from typing import List, Dict + import numpy as np + from sentence_transformers import SentenceTransformer + from faiss import IndexFlatL2 + + from adalflow.components.model_client import GroqAPIClient, OpenAIClient + from adalflow.core.types import ModelType + from adalflow.utils import setup_env + + +**Pipeline Initialization** + +- **Model Client**: Abstracts the API calls to the chosen LLM provider. +- **Embedding Model**: Defaulted to ``all-MiniLM-L6-v2``, generates 384-dimensional embeddings. +- **Vector Dimension**: Dimensionality of the embedding vectors generated by the embedding model ``all-MiniLM-L6-v2``. +- **Top K Retrieval**: Specifies the number of most relevant documents to retrieve. + +The ``add_documents()`` function encodes the documents into embeddings and stores them in the FAISS index. It +uses the SentenceTransformer model to generate vector representations of the text. 
These embeddings are then +added to the FAISS index for efficient similarity search and are also stored in a list for later retrieval. + +The ``retrieve_relevant_docs()`` function takes a query string as input and retrieves the top k documents +that are most relevant to the query based on their similarity. The query is first encoded into an embedding, +and then the FAISS index is used to perform a similarity search to identify the documents that are closest +in meaning to the query. + +The ``generate_response()`` function constructs a context-aware prompt by combining the retrieved documents +with the user's query. It then calls the model_client to generate a response from the language model. The +conversation history is also maintained, logging each query and its corresponding response for future +reference and context. + + +.. code-block:: python + + class AdalflowRAGPipeline: + def __init__(self, + model_client = None, + model_kwargs = None, + embedding_model='all-MiniLM-L6-v2', + vector_dim=384, + top_k_retrieval=1): + """ + Initialize RAG Pipeline with embedding and retrieval components + + Args: + embedding_model (str): Sentence transformer model for embeddings + vector_dim (int): Dimension of embedding vectors + top_k_retrieval (int): Number of documents to retrieve + """ + # Initialize model client for generation + self.model_client = model_client + + # Initialize embedding model + self.embedding_model = SentenceTransformer(embedding_model) + + # Initialize FAISS index for vector similarity search + self.index = IndexFlatL2(vector_dim) + + # Store document texts and their embeddings + self.documents = [] + self.document_embeddings = [] + + # Retrieval parameters + self.top_k_retrieval = top_k_retrieval + + # Conversation history and context + self.conversation_history = "" + self.model_kwargs = model_kwargs + + def add_documents(self, documents: List[str]): + """ + Add documents to the RAG pipeline's knowledge base + + Args: + documents (List[str]): List of document texts to add + """ + for doc in documents: + # Embed document + embedding = self.embedding_model.encode(doc) + + # Add to index and document store + self.index.add(np.array([embedding])) + self.documents.append(doc) + self.document_embeddings.append(embedding) + + def retrieve_relevant_docs(self, query: str) -> List[str]: + """ + Retrieve most relevant documents for a given query + + Args: + query (str): Input query to find relevant documents + + Returns: + List[str]: Top k most relevant documents + """ + # Embed query + query_embedding = self.embedding_model.encode(query) + + # Perform similarity search + distances, indices = self.index.search( + np.array([query_embedding]), + self.top_k_retrieval + ) + + # Retrieve and return top documents + return [self.documents[i] for i in indices[0]] + + def generate_response(self, query: str) -> str: + """ + Generate a response using retrieval-augmented generation + + Args: + query (str): User's input query + + Returns: + str: Generated response incorporating retrieved context + """ + # Retrieve relevant documents + retrieved_docs = self.retrieve_relevant_docs(query) + + # Construct context-aware prompt + context = "\n\n".join([f"Context Document: {doc}" for doc in retrieved_docs]) + full_prompt = f""" + Context: + {context} + + Query: {query} + + Generate a comprehensive and informative response that: + 1. Uses the provided context documents + 2. Directly answers the query + 3. 
Incorporates relevant information from the context + """ + + # Prepare API arguments + api_kwargs = self.model_client.convert_inputs_to_api_kwargs( + input=full_prompt, + model_kwargs=self.model_kwargs, + model_type=ModelType.LLM + ) + + # Call API and parse response + response = self.model_client.call( + api_kwargs=api_kwargs, + model_type=ModelType.LLM + ) + response_text = self.model_client.parse_chat_completion(response) + + # Update conversation history + self.conversation_history += f"\nQuery: {query}\nResponse: {response_text}" + + return response_text + +**Running the Pipeline** + +- **Pipeline Workflow**: + 1. Initializes the ``AdalflowRAGPipeline``. + 2. Adds documents to the knowledge base. + 3. Processes each query to retrieve documents and generate responses. + + +.. code-block:: python + + def run_rag_pipeline(model_client, model_kwargs, documents, queries): + rag_pipeline = AdalflowRAGPipeline(model_client=model_client, model_kwargs=model_kwargs) + + rag_pipeline.add_documents(documents) + + # Generate responses + for query in queries: + print(f"\nQuery: {query}") + response = rag_pipeline.generate_response(query) + print(f"Response: {response}") + +- **Documents**: Serve as the knowledge base for validation. +- **Queries**: Designed to test retrieval and generation specific to document content. + +.. code-block:: python + + # ajithvcoder's statements are added so that we can validate that the LLM is generating from these lines only + documents = [ + "ajithvcoder is a good person whom the world knows as Ajith Kumar, ajithvcoder is his nick name that AjithKumar gave himself", + "The Eiffel Tower is a famous landmark in Paris, built in 1889 for the World's Fair.", + "ajithvcoder likes Hyderabadi panner dum briyani much.", + "The Louvre Museum in Paris is the world's largest art museum, housing thousands of works of art.", + "ajithvcoder has a engineering degree and he graduated on May, 2016." + ] + + # Questions related to ajithvcoder's are added so that we can validate + # that the LLM is generating from above given lines only + queries = [ + "Does Ajith Kumar has any nick name ?", + "What is the ajithvcoder's favourite food?", + "When did ajithvcoder graduated ?" + ] + +**API Integration** + +- **Generic API Client**: Demonstrates flexibility in using different LLM APIs like Groq and OpenAI without altering the core pipeline logic. + +.. code-block:: python + + groq_model_kwargs = { + "model": "llama-3.2-1b-preview", # Use 16k model for larger context + "temperature": 0.1, + "max_tokens": 800, + } + + openai_model_kwargs = { + "model": "gpt-3.5-turbo", # Use 16k model for larger context + "temperature": 0.1, + "max_tokens": 800, + } + + # Below example shows that adalflow can be used in a genric manner for any api provider + # without worrying about prompt and parsing results + model_client = GroqAPIClient() + run_rag_pipeline(model_client, groq_model_kwargs, documents, queries) + run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries) + + +.. 
admonition:: API reference + :class: highlight + + - :class:`utils.setup_env` + - :class:`core.types.ModelType` + - :class:`components.model_client.OpenAIClient` + - :class:`components.model_client.GroqAPIClient` From b5c40a4f4861bd91d7111407a1abf6f534056ff7 Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sat, 14 Dec 2024 14:26:57 -0800 Subject: [PATCH 39/40] revert pr 296, the rag does not meet our standard and it is not a showcase on the library but pretty general self built rag --- docs/source/use_cases/rag_documents.rst | 322 ------------- docs/source/use_cases/rag_vanilla.rst | 263 ----------- .../tutorials/adalflow_rag_documents.ipynb | 443 ------------------ .../tutorials/adalflow_rag_vanilla.ipynb | 376 --------------- tutorials/adalflow_rag_documents.py | 248 ---------- tutorials/adalflow_rag_vanilla.py | 188 -------- ...nescent Guardians of the Desert Canyon.txt | 6 - ...nigmatic Crystal Cavern of Lake Aurora.txt | 6 - .../The Legend of the Moonshade Willow.txt | 6 - 9 files changed, 1858 deletions(-) delete mode 100644 docs/source/use_cases/rag_documents.rst delete mode 100644 docs/source/use_cases/rag_vanilla.rst delete mode 100644 notebooks/tutorials/adalflow_rag_documents.ipynb delete mode 100644 notebooks/tutorials/adalflow_rag_vanilla.ipynb delete mode 100644 tutorials/adalflow_rag_documents.py delete mode 100644 tutorials/adalflow_rag_vanilla.py delete mode 100644 tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt delete mode 100644 tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt delete mode 100644 tutorials/assets/documents/The Legend of the Moonshade Willow.txt diff --git a/docs/source/use_cases/rag_documents.rst b/docs/source/use_cases/rag_documents.rst deleted file mode 100644 index 26c8a430..00000000 --- a/docs/source/use_cases/rag_documents.rst +++ /dev/null @@ -1,322 +0,0 @@ -.. raw:: html - - - -RAG for documents -============================= - -Overview --------- - -This implementation showcases an end-to-end RAG system capable of handling large-scale text files and -generating context-aware responses. It is both modular and extensible, making it adaptable to various -use cases and LLM APIs. - -**Imports** - -- **SentenceTransformer**: Used for creating dense vector embeddings for textual data. -- **FAISS**: Provides efficient similarity search using vector indexing. -- **tiktoken**: ensures that the text preprocessing aligns with the tokenization requirements of the underlying language models, making the pipeline robust and efficient. -- **GroqAPIClient and OpenAIClient**: Custom classes for interacting with different LLM providers. -- **ModelType**: Enum for specifying the model type. - -.. code-block:: python - - import os - import tiktoken - from typing import List, Dict, Tuple - import numpy as np - from sentence_transformers import SentenceTransformer - from faiss import IndexFlatL2 - - from adalflow.components.model_client import GroqAPIClient, OpenAIClient - from adalflow.core.types import ModelType - from adalflow.utils import setup_env - -The ``AdalflowRAGPipeline`` class sets up the Retrieval-Augmented Generation (RAG) pipeline. Its ``__init__`` method initializes key components: - -- An embedding model (``all-MiniLM-L6-v2``) is loaded using ``SentenceTransformer`` to convert text into dense vector embeddings with a dimensionality of 384. -- A FAISS index (``IndexFlatL2``) is created for similarity-based document retrieval. 
-- Parameters such as ``top_k_retrieval`` (number of documents to retrieve) and ``max_context_tokens`` (limit on token count in the context) are configured. -- A tokenizer (``tiktoken``) ensures precise token counting, crucial for handling large language models (LLMs). - -The method also initializes storage for documents, their embeddings, and associated metadata for efficient management and retrieval. - -The ``AdalflowRAGPipeline`` class provides a flexible pipeline for Retrieval-Augmented Generation (RAG), -initializing with parameters such as the embedding model (``all-MiniLM-L6-v2`` by default), vector dimension, -top-k retrieval count, and token limits for context. It utilizes a tokenizer for token counting, a -SentenceTransformer for embeddings, and a FAISS index for similarity searches, while also maintaining -document data and metadata. The ``load_text_file`` method processes large text files into manageable chunks -by splitting the content into fixed line groups, facilitating easier embedding and storage. To handle -multiple files, ``add_documents_from_directory`` iterates over text files in a directory, embeds the content, -and stores them in the FAISS index along with metadata. Token counting is achieved via the ``count_tokens`` -method, leveraging a tokenizer to precisely determine the number of tokens in a given text. The -``retrieve_and_truncate_context`` method fetches the most relevant documents from the FAISS index based on -query embeddings, truncating the context to adhere to token limits. Finally, the ``generate_response`` method -constructs a comprehensive prompt by combining the retrieved context and query, invokes the provided model -client for a response, and parses the results into a readable format. This pipeline demonstrates seamless -integration of text retrieval and generation to handle large-scale document queries effectively. - - -.. 
code-block:: python - - class AdalflowRAGPipeline: - def __init__(self, - model_client=None, - model_kwargs=None, - embedding_model='all-MiniLM-L6-v2', - vector_dim=384, - top_k_retrieval=3, - max_context_tokens=800): - """ - Initialize RAG Pipeline for handling large text files - - Args: - embedding_model (str): Sentence transformer model for embeddings - vector_dim (int): Dimension of embedding vectors - top_k_retrieval (int): Number of documents to retrieve - max_context_tokens (int): Maximum tokens to send to LLM - """ - # Initialize model client for generation - self.model_client = model_client - - # Initialize tokenizer for precise token counting - self.tokenizer = tiktoken.get_encoding("cl100k_base") - - # Initialize embedding model - self.embedding_model = SentenceTransformer(embedding_model) - - # Initialize FAISS index for vector similarity search - self.index = IndexFlatL2(vector_dim) - - # Store document texts, embeddings, and metadata - self.documents = [] - self.document_embeddings = [] - self.document_metadata = [] - - # Retrieval and context management parameters - self.top_k_retrieval = top_k_retrieval - self.max_context_tokens = max_context_tokens - - # Model generation parameters - self.model_kwargs = model_kwargs - - def load_text_file(self, file_path: str) -> List[str]: - """ - Load a large text file and split into manageable chunks - - Args: - file_path (str): Path to the text file - - Returns: - List[str]: List of document chunks - """ - with open(file_path, 'r', encoding='utf-8') as file: - # Read entire file - content = file.read() - - # Split content into chunks (e.g., 10 lines per chunk) - lines = content.split('\n') - chunks = [] - chunk_size = 10 # Adjust based on your file structure - - for i in range(0, len(lines), chunk_size): - chunk = '\n'.join(lines[i:i+chunk_size]) - chunks.append(chunk) - - return chunks - - def add_documents_from_directory(self, directory_path: str): - """ - Add documents from all text files in a directory - - Args: - directory_path (str): Path to directory containing text files - """ - for filename in os.listdir(directory_path): - if filename.endswith('.txt'): - file_path = os.path.join(directory_path, filename) - document_chunks = self.load_text_file(file_path) - - for chunk in document_chunks: - # Embed document chunk - embedding = self.embedding_model.encode(chunk) - - # Add to index and document store - self.index.add(np.array([embedding])) - self.documents.append(chunk) - self.document_embeddings.append(embedding) - self.document_metadata.append({ - 'filename': filename, - 'chunk_index': len(self.document_metadata) - }) - - def count_tokens(self, text: str) -> int: - """ - Count tokens in a given text - - Args: - text (str): Input text - - Returns: - int: Number of tokens - """ - return len(self.tokenizer.encode(text)) - - def retrieve_and_truncate_context(self, query: str) -> str: - """ - Retrieve relevant documents and truncate to fit token limit - - Args: - query (str): Input query - - Returns: - str: Concatenated context within token limit - """ - # Retrieve relevant documents - query_embedding = self.embedding_model.encode(query) - distances, indices = self.index.search( - np.array([query_embedding]), - self.top_k_retrieval - ) - - # Collect and truncate context - context = [] - current_tokens = 0 - - for idx in indices[0]: - doc = self.documents[idx] - doc_tokens = self.count_tokens(doc) - - # Check if adding this document would exceed token limit - if current_tokens + doc_tokens <= self.max_context_tokens: - 
context.append(doc) - current_tokens += doc_tokens - else: - break - - return "\n\n".join(context) - - def generate_response(self, query: str) -> str: - """ - Generate a response using retrieval-augmented generation - - Args: - query (str): User's input query - - Returns: - str: Generated response incorporating retrieved context - """ - # Retrieve and truncate context - retrieved_context = self.retrieve_and_truncate_context(query) - - # Construct context-aware prompt - full_prompt = f""" - Context Documents: - {retrieved_context} - - Query: {query} - - Generate a comprehensive response that: - 1. Directly answers the query - 2. Incorporates relevant information from the context documents - 3. Provides clear and concise information - """ - - # Prepare API arguments - api_kwargs = self.model_client.convert_inputs_to_api_kwargs( - input=full_prompt, - model_kwargs=self.model_kwargs, - model_type=ModelType.LLM - ) - - # Call API and parse response - response = self.model_client.call( - api_kwargs=api_kwargs, - model_type=ModelType.LLM - ) - response_text = self.model_client.parse_chat_completion(response) - - return response_text - -The ``run_rag_pipeline`` function demonstrates how to use the ``AdalflowRAGPipeline``. It initializes the pipeline, -adds documents from a directory, and generates responses for a list of user queries. The function is generic -and can accommodate various LLM API clients, such as GroqAPIClient or OpenAIClient, highlighting the pipeline's -flexibility and modularity. - - -.. code-block:: python - - def run_rag_pipeline(model_client, model_kwargs, documents, queries): - - # Example usage of RAG pipeline - rag_pipeline = AdalflowRAGPipeline( - model_client=model_client, - model_kwargs=model_kwargs, - top_k_retrieval=1, # Retrieve top 1 most relevant chunks - max_context_tokens=800 # Limit context to 1500 tokens - ) - - # Add documents from a directory of text files - rag_pipeline.add_documents_from_directory(documents) - - # Generate responses - for query in queries: - print(f"\nQuery: {query}") - response = rag_pipeline.generate_response(query) - print(f"Response: {response}") - - -This block provides an example of running the pipeline with different models and queries. It specifies: - -- The document directory containing the text files. -- Example queries about topics such as the "Crystal Cavern" and "rare trees in Elmsworth." -- Configuration for Groq and OpenAI model parameters, including the model type, temperature, and token limits. - -.. code-block:: python - - documents = '../../tutorials/assets/documents' - - queries = [ - "What year was the Crystal Cavern discovered?", - "What is the name of the rare tree in Elmsworth?", - "What local legend claim that Lunaflits surrounds?" - ] - - groq_model_kwargs = { - "model": "llama-3.2-1b-preview", # Use 16k model for larger context - "temperature": 0.1, - "max_tokens": 800, - } - - openai_model_kwargs = { - "model": "gpt-3.5-turbo", - "temperature": 0.1, - "max_tokens": 800, - } - # Below example shows that adalflow can be used in a genric manner for any api provider - # without worrying about prompt and parsing results - run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries) - run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries) - -The example emphasizes that ``AdalflowRAGPipeline`` can interact seamlessly with multiple API providers, -enabling integration with diverse LLMs without modifying the core logic for prompt construction or -response parsing. - - -.. 
admonition:: API reference - :class: highlight - - - :class:`utils.setup_env` - - :class:`core.types.ModelType` - - :class:`components.model_client.OpenAIClient` - - :class:`components.model_client.GroqAPIClient` diff --git a/docs/source/use_cases/rag_vanilla.rst b/docs/source/use_cases/rag_vanilla.rst deleted file mode 100644 index 1a80bbbf..00000000 --- a/docs/source/use_cases/rag_vanilla.rst +++ /dev/null @@ -1,263 +0,0 @@ -.. raw:: html - - - -RAG Vanilla -============================= - -Overview --------- - -The **RAG Vanilla** implementation is a Retrieval-Augmented Generation pipeline that integrates document -retrieval with natural language generation. This approach allows users to retrieve contextually relevant -documents from a knowledge base and generate informative responses. The code leverages components such as -sentence embeddings, FAISS indexing, and a large language model (LLM) API client. - - -**Imports** - -- **SentenceTransformer**: Used for creating dense vector embeddings for textual data. -- **FAISS**: Provides efficient similarity search using vector indexing. -- **GroqAPIClient and OpenAIClient**: Custom classes for interacting with different LLM providers. -- **ModelType**: Enum for specifying the model type. - -.. code-block:: python - - import os - from typing import List, Dict - import numpy as np - from sentence_transformers import SentenceTransformer - from faiss import IndexFlatL2 - - from adalflow.components.model_client import GroqAPIClient, OpenAIClient - from adalflow.core.types import ModelType - from adalflow.utils import setup_env - - -**Pipeline Initialization** - -- **Model Client**: Abstracts the API calls to the chosen LLM provider. -- **Embedding Model**: Defaulted to ``all-MiniLM-L6-v2``, generates 384-dimensional embeddings. -- **Vector Dimension**: Dimensionality of the embedding vectors generated by the embedding model ``all-MiniLM-L6-v2``. -- **Top K Retrieval**: Specifies the number of most relevant documents to retrieve. - -The ``add_documents()`` function encodes the documents into embeddings and stores them in the FAISS index. It -uses the SentenceTransformer model to generate vector representations of the text. These embeddings are then -added to the FAISS index for efficient similarity search and are also stored in a list for later retrieval. - -The ``retrieve_relevant_docs()`` function takes a query string as input and retrieves the top k documents -that are most relevant to the query based on their similarity. The query is first encoded into an embedding, -and then the FAISS index is used to perform a similarity search to identify the documents that are closest -in meaning to the query. - -The ``generate_response()`` function constructs a context-aware prompt by combining the retrieved documents -with the user's query. It then calls the model_client to generate a response from the language model. The -conversation history is also maintained, logging each query and its corresponding response for future -reference and context. - - -.. 
code-block:: python - - class AdalflowRAGPipeline: - def __init__(self, - model_client = None, - model_kwargs = None, - embedding_model='all-MiniLM-L6-v2', - vector_dim=384, - top_k_retrieval=1): - """ - Initialize RAG Pipeline with embedding and retrieval components - - Args: - embedding_model (str): Sentence transformer model for embeddings - vector_dim (int): Dimension of embedding vectors - top_k_retrieval (int): Number of documents to retrieve - """ - # Initialize model client for generation - self.model_client = model_client - - # Initialize embedding model - self.embedding_model = SentenceTransformer(embedding_model) - - # Initialize FAISS index for vector similarity search - self.index = IndexFlatL2(vector_dim) - - # Store document texts and their embeddings - self.documents = [] - self.document_embeddings = [] - - # Retrieval parameters - self.top_k_retrieval = top_k_retrieval - - # Conversation history and context - self.conversation_history = "" - self.model_kwargs = model_kwargs - - def add_documents(self, documents: List[str]): - """ - Add documents to the RAG pipeline's knowledge base - - Args: - documents (List[str]): List of document texts to add - """ - for doc in documents: - # Embed document - embedding = self.embedding_model.encode(doc) - - # Add to index and document store - self.index.add(np.array([embedding])) - self.documents.append(doc) - self.document_embeddings.append(embedding) - - def retrieve_relevant_docs(self, query: str) -> List[str]: - """ - Retrieve most relevant documents for a given query - - Args: - query (str): Input query to find relevant documents - - Returns: - List[str]: Top k most relevant documents - """ - # Embed query - query_embedding = self.embedding_model.encode(query) - - # Perform similarity search - distances, indices = self.index.search( - np.array([query_embedding]), - self.top_k_retrieval - ) - - # Retrieve and return top documents - return [self.documents[i] for i in indices[0]] - - def generate_response(self, query: str) -> str: - """ - Generate a response using retrieval-augmented generation - - Args: - query (str): User's input query - - Returns: - str: Generated response incorporating retrieved context - """ - # Retrieve relevant documents - retrieved_docs = self.retrieve_relevant_docs(query) - - # Construct context-aware prompt - context = "\n\n".join([f"Context Document: {doc}" for doc in retrieved_docs]) - full_prompt = f""" - Context: - {context} - - Query: {query} - - Generate a comprehensive and informative response that: - 1. Uses the provided context documents - 2. Directly answers the query - 3. Incorporates relevant information from the context - """ - - # Prepare API arguments - api_kwargs = self.model_client.convert_inputs_to_api_kwargs( - input=full_prompt, - model_kwargs=self.model_kwargs, - model_type=ModelType.LLM - ) - - # Call API and parse response - response = self.model_client.call( - api_kwargs=api_kwargs, - model_type=ModelType.LLM - ) - response_text = self.model_client.parse_chat_completion(response) - - # Update conversation history - self.conversation_history += f"\nQuery: {query}\nResponse: {response_text}" - - return response_text - -**Running the Pipeline** - -- **Pipeline Workflow**: - 1. Initializes the ``AdalflowRAGPipeline``. - 2. Adds documents to the knowledge base. - 3. Processes each query to retrieve documents and generate responses. - - -.. 
code-block:: python - - def run_rag_pipeline(model_client, model_kwargs, documents, queries): - rag_pipeline = AdalflowRAGPipeline(model_client=model_client, model_kwargs=model_kwargs) - - rag_pipeline.add_documents(documents) - - # Generate responses - for query in queries: - print(f"\nQuery: {query}") - response = rag_pipeline.generate_response(query) - print(f"Response: {response}") - -- **Documents**: Serve as the knowledge base for validation. -- **Queries**: Designed to test retrieval and generation specific to document content. - -.. code-block:: python - - # ajithvcoder's statements are added so that we can validate that the LLM is generating from these lines only - documents = [ - "ajithvcoder is a good person whom the world knows as Ajith Kumar, ajithvcoder is his nick name that AjithKumar gave himself", - "The Eiffel Tower is a famous landmark in Paris, built in 1889 for the World's Fair.", - "ajithvcoder likes Hyderabadi panner dum briyani much.", - "The Louvre Museum in Paris is the world's largest art museum, housing thousands of works of art.", - "ajithvcoder has a engineering degree and he graduated on May, 2016." - ] - - # Questions related to ajithvcoder's are added so that we can validate - # that the LLM is generating from above given lines only - queries = [ - "Does Ajith Kumar has any nick name ?", - "What is the ajithvcoder's favourite food?", - "When did ajithvcoder graduated ?" - ] - -**API Integration** - -- **Generic API Client**: Demonstrates flexibility in using different LLM APIs like Groq and OpenAI without altering the core pipeline logic. - -.. code-block:: python - - groq_model_kwargs = { - "model": "llama-3.2-1b-preview", # Use 16k model for larger context - "temperature": 0.1, - "max_tokens": 800, - } - - openai_model_kwargs = { - "model": "gpt-3.5-turbo", # Use 16k model for larger context - "temperature": 0.1, - "max_tokens": 800, - } - - # Below example shows that adalflow can be used in a genric manner for any api provider - # without worrying about prompt and parsing results - model_client = GroqAPIClient() - run_rag_pipeline(model_client, groq_model_kwargs, documents, queries) - run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries) - - -.. admonition:: API reference - :class: highlight - - - :class:`utils.setup_env` - - :class:`core.types.ModelType` - - :class:`components.model_client.OpenAIClient` - - :class:`components.model_client.GroqAPIClient` diff --git a/notebooks/tutorials/adalflow_rag_documents.ipynb b/notebooks/tutorials/adalflow_rag_documents.ipynb deleted file mode 100644 index 373f6bae..00000000 --- a/notebooks/tutorials/adalflow_rag_documents.ipynb +++ /dev/null @@ -1,443 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 🤗 Welcome to AdalFlow!\n", - "## The PyTorch library to auto-optimize any LLM task pipelines\n", - "\n", - "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! 
⭐ Star us on Github ⭐\n", - "\n", - "\n", - "# Quick Links\n", - "\n", - "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", - "\n", - "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", - "\n", - "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", - "\n", - "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", - "\n", - "# Author\n", - "This notebook was created by community contributor [Ajith](https://github.com/ajithvcoder/).\n", - "\n", - "# Outline\n", - "\n", - "This is a quick introduction of what AdalFlow is capable of. We will cover:\n", - "\n", - "* How to use adalflow for rag with documents\n", - "\n", - "Adalflow can be used in a genric manner for any api provider without worrying much about prompt, \n", - "model args and parsing results\n", - "\n", - "**Next: Try our [adalflow-text-splitter](\"https://colab.research.google.com/github.com/SylphAI-Inc/AdalFlow/blob/main/notebooks/tutorials/adalflow_text_splitter.ipynb\")**\n", - "\n", - "\n", - "# Installation\n", - "\n", - "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq`, and `faiss`(cpu version) from the extra packages.\n", - "\n", - " ```bash\n", - " pip install torch --index-url https://download.pytorch.org/whl/cpu\n", - " pip install sentence-transformers==3.3.1\n", - " pip install adalflow[openai,groq,faiss-cpu]\n", - " ```\n", - "2. Setup `openai` and `groq` API key in the environment variables" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Set Environment Variables\n", - "\n", - "Note: Enter your api keys in below cell #todo" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Overwriting .env\n" - ] - } - ], - "source": [ - "%%writefile .env\n", - "\n", - "OPENAI_API_KEY=\"PASTE-OPENAI_API_KEY_HERE\"\n", - "GROQ_API_KEY=\"PASTE-GROQ_API_KEY-HERE\"" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from adalflow.utils import setup_env\n", - "\n", - "# Load environment variables - Make sure to have OPENAI_API_KEY in .env file and .env is present in current folder\n", - "setup_env(\".env\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/ajithdev/AdalFlow/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], - "source": [ - "import os\n", - "import tiktoken\n", - "from typing import List, Dict, Tuple\n", - "import numpy as np\n", - "from sentence_transformers import SentenceTransformer\n", - "from faiss import IndexFlatL2\n", - "\n", - "from adalflow.components.model_client import GroqAPIClient, OpenAIClient\n", - "from adalflow.core.types import ModelType\n", - "from adalflow.utils import setup_env" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`AdalflowRAGPipeline` is a class that implements a Retrieval-Augmented Generation (RAG) pipeline with adalflow using documents. 
It has:\n", - "\n", - "- Efficient RAG Pipeline for handling large text files, embedding, and retrieval.\n", - "- Supports token management and context truncation for LLM integration.\n", - "- Generates accurate responses using retrieval-augmented generation (RAG)." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "class AdalflowRAGPipeline:\n", - " def __init__(self,\n", - " model_client=None,\n", - " model_kwargs=None,\n", - " embedding_model='all-MiniLM-L6-v2', \n", - " vector_dim=384, \n", - " top_k_retrieval=3,\n", - " max_context_tokens=800):\n", - " \"\"\"\n", - " Initialize RAG Pipeline for handling large text files\n", - " \n", - " Args:\n", - " embedding_model (str): Sentence transformer model for embeddings\n", - " vector_dim (int): Dimension of embedding vectors\n", - " top_k_retrieval (int): Number of documents to retrieve\n", - " max_context_tokens (int): Maximum tokens to send to LLM\n", - " \"\"\"\n", - " # Initialize model client for generation\n", - " self.model_client = model_client\n", - " \n", - " # Initialize tokenizer for precise token counting\n", - " self.tokenizer = tiktoken.get_encoding(\"cl100k_base\")\n", - " \n", - " # Initialize embedding model\n", - " self.embedding_model = SentenceTransformer(embedding_model)\n", - " \n", - " # Initialize FAISS index for vector similarity search\n", - " self.index = IndexFlatL2(vector_dim)\n", - " \n", - " # Store document texts, embeddings, and metadata\n", - " self.documents = []\n", - " self.document_embeddings = []\n", - " self.document_metadata = []\n", - " \n", - " # Retrieval and context management parameters\n", - " self.top_k_retrieval = top_k_retrieval\n", - " self.max_context_tokens = max_context_tokens\n", - " \n", - " # Model generation parameters\n", - " self.model_kwargs = model_kwargs\n", - "\n", - " def load_text_file(self, file_path: str) -> List[str]:\n", - " \"\"\"\n", - " Load a large text file and split into manageable chunks\n", - " \n", - " Args:\n", - " file_path (str): Path to the text file\n", - " \n", - " Returns:\n", - " List[str]: List of document chunks\n", - " \"\"\"\n", - " with open(file_path, 'r', encoding='utf-8') as file:\n", - " # Read entire file\n", - " content = file.read()\n", - " \n", - " # Split content into chunks (e.g., 10 lines per chunk)\n", - " lines = content.split('\\n')\n", - " chunks = []\n", - " chunk_size = 10 # Adjust based on your file structure\n", - " \n", - " for i in range(0, len(lines), chunk_size):\n", - " chunk = '\\n'.join(lines[i:i+chunk_size])\n", - " chunks.append(chunk)\n", - " \n", - " return chunks\n", - "\n", - " def add_documents_from_directory(self, directory_path: str):\n", - " \"\"\"\n", - " Add documents from all text files in a directory\n", - " \n", - " Args:\n", - " directory_path (str): Path to directory containing text files\n", - " \"\"\"\n", - " for filename in os.listdir(directory_path):\n", - " if filename.endswith('.txt'):\n", - " file_path = os.path.join(directory_path, filename)\n", - " document_chunks = self.load_text_file(file_path)\n", - " \n", - " for chunk in document_chunks:\n", - " # Embed document chunk\n", - " embedding = self.embedding_model.encode(chunk)\n", - " \n", - " # Add to index and document store\n", - " self.index.add(np.array([embedding]))\n", - " self.documents.append(chunk)\n", - " self.document_embeddings.append(embedding)\n", - " self.document_metadata.append({\n", - " 'filename': filename,\n", - " 'chunk_index': len(self.document_metadata)\n", - " 
})\n", - "\n", - " def count_tokens(self, text: str) -> int:\n", - " \"\"\"\n", - " Count tokens in a given text\n", - " \n", - " Args:\n", - " text (str): Input text\n", - " \n", - " Returns:\n", - " int: Number of tokens\n", - " \"\"\"\n", - " return len(self.tokenizer.encode(text))\n", - "\n", - " def retrieve_and_truncate_context(self, query: str) -> str:\n", - " \"\"\"\n", - " Retrieve relevant documents and truncate to fit token limit\n", - " \n", - " Args:\n", - " query (str): Input query\n", - " \n", - " Returns:\n", - " str: Concatenated context within token limit\n", - " \"\"\"\n", - " # Retrieve relevant documents\n", - " query_embedding = self.embedding_model.encode(query)\n", - " distances, indices = self.index.search(\n", - " np.array([query_embedding]), \n", - " self.top_k_retrieval\n", - " )\n", - " \n", - " # Collect and truncate context\n", - " context = []\n", - " current_tokens = 0\n", - " \n", - " for idx in indices[0]:\n", - " doc = self.documents[idx]\n", - " doc_tokens = self.count_tokens(doc)\n", - " \n", - " # Check if adding this document would exceed token limit\n", - " if current_tokens + doc_tokens <= self.max_context_tokens:\n", - " context.append(doc)\n", - " current_tokens += doc_tokens\n", - " else:\n", - " break\n", - " \n", - " return \"\\n\\n\".join(context)\n", - "\n", - " def generate_response(self, query: str) -> str:\n", - " \"\"\"\n", - " Generate a response using retrieval-augmented generation\n", - " \n", - " Args:\n", - " query (str): User's input query\n", - " \n", - " Returns:\n", - " str: Generated response incorporating retrieved context\n", - " \"\"\"\n", - " # Retrieve and truncate context\n", - " retrieved_context = self.retrieve_and_truncate_context(query)\n", - " \n", - " # Construct context-aware prompt\n", - " full_prompt = f\"\"\"\n", - " Context Documents:\n", - " {retrieved_context}\n", - " \n", - " Query: {query}\n", - " \n", - " Generate a comprehensive response that:\n", - " 1. Directly answers the query\n", - " 2. Incorporates relevant information from the context documents\n", - " 3. Provides clear and concise information\n", - " \"\"\"\n", - " \n", - " # Prepare API arguments\n", - " api_kwargs = self.model_client.convert_inputs_to_api_kwargs(\n", - " input=full_prompt,\n", - " model_kwargs=self.model_kwargs,\n", - " model_type=ModelType.LLM\n", - " )\n", - " \n", - " # Call API and parse response\n", - " response = self.model_client.call(\n", - " api_kwargs=api_kwargs, \n", - " model_type=ModelType.LLM\n", - " )\n", - " response_text = self.model_client.parse_chat_completion(response)\n", - " \n", - " return response_text\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`run_rag_pipeline` demonstrates how to use the AdalflowRAGPipeline to handle retrieval-augmented generation. It initializes the pipeline with specified retrieval and context token limits, loads documents from a directory, and processes a list of queries. For each query, the function retrieves relevant context, generates a response using the pipeline, and prints the results." 
- ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "def run_rag_pipeline(model_client, model_kwargs, documents, queries):\n", - "\n", - " # Example usage of RAG pipeline\n", - " rag_pipeline = AdalflowRAGPipeline(\n", - " model_client=model_client,\n", - " model_kwargs=model_kwargs,\n", - " top_k_retrieval=1, # Retrieve top 1 most relevant chunks\n", - " max_context_tokens=800 # Limit context to 1500 tokens\n", - " )\n", - "\n", - " # Add documents from a directory of text files\n", - " rag_pipeline.add_documents_from_directory(documents)\n", - " \n", - " # Generate responses\n", - " for query in queries:\n", - " print(f\"\\nQuery: {query}\")\n", - " response = rag_pipeline.generate_response(query)\n", - " print(f\"Response: {response}\")\n" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Query: What year was the Crystal Cavern discovered?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=14, prompt_tokens=203, total_tokens=217), raw_response='The Crystal Cavern was discovered in 1987 by divers.', metadata=None)\n", - "\n", - "Query: What is the name of the rare tree in Elmsworth?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=17, prompt_tokens=212, total_tokens=229), raw_response='The rare tree in Elmsworth is known as the \"Moonshade Willow\".', metadata=None)\n", - "\n", - "Query: What local legend claim that Lunaflits surrounds?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=19, prompt_tokens=206, total_tokens=225), raw_response='Local legend claims that Lunaflits are guardians of ancient treasure buried deep within the canyon.', metadata=None)\n", - "\n", - "Query: What year was the Crystal Cavern discovered?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=107, prompt_tokens=184, total_tokens=291), raw_response='The Crystal Cavern was discovered by divers in the year 1987 beneath the icy waters of Lake Aurora. The cavern is known for its shimmering quartz formations that refract sunlight into a spectrum of colors, and it is believed to have served as a sanctuary for an ancient civilization that revered the crystals as conduits to the spirit world. Artifacts recovered from the cavern are carved with intricate symbols, indicating a deep connection to celestial events. However, accessing the cavern is dangerous due to the freezing temperatures and strong currents of the lake.', metadata=None)\n", - "\n", - "Query: What is the name of the rare tree in Elmsworth?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=104, prompt_tokens=193, total_tokens=297), raw_response='The rare tree in Elmsworth is called the \"Moonshade Willow.\" It blooms once every seven years, emitting a soft glow from its blossoms. Villagers believe that meditating under its branches brings vivid dreams of the future. The tree\\'s bark contains a secret resin used in ancient healing rituals. Elders claim that the Moonshade Willow was a gift from a goddess to protect the village. 
Researchers have found that the tree can only thrive in Elmsworth\\'s unique soil, making it impossible to cultivate elsewhere.', metadata=None)\n", - "\n", - "Query: What local legend claim that Lunaflits surrounds?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=100, prompt_tokens=187, total_tokens=287), raw_response='Local legends claim that Lunaflits, the glowing insects found in the remote desert canyon, are believed to be guardians of ancient treasure buried deep within the canyon. These creatures emit a constant, soothing green light that illuminates the canyon at night, and their rhythmic light pulses form intricate patterns, suggesting a form of communication among them. The ethereal glow created by the Lunaflits and the rare moss reflecting their light have contributed to the mystical reputation of these insects as protectors of hidden riches.', metadata=None)\n" - ] - } - ], - "source": [ - "# setup_env()\n", - "\n", - "documents = '../../tutorials/assets/documents'\n", - "\n", - "queries = [\n", - " \"What year was the Crystal Cavern discovered?\",\n", - " \"What is the name of the rare tree in Elmsworth?\",\n", - " \"What local legend claim that Lunaflits surrounds?\"\n", - "]\n", - "\n", - "groq_model_kwargs = {\n", - " \"model\": \"llama-3.2-1b-preview\", # Use 16k model for larger context\n", - " \"temperature\": 0.1,\n", - " \"max_tokens\": 800,\n", - "}\n", - "\n", - "openai_model_kwargs = {\n", - " \"model\": \"gpt-3.5-turbo\",\n", - " \"temperature\": 0.1,\n", - " \"max_tokens\": 800,\n", - "}\n", - "# Below example shows that adalflow can be used in a genric manner for any api provider\n", - "# without worrying about prompt and parsing results\n", - "run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries)\n", - "run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/tutorials/adalflow_rag_vanilla.ipynb b/notebooks/tutorials/adalflow_rag_vanilla.ipynb deleted file mode 100644 index 34a53174..00000000 --- a/notebooks/tutorials/adalflow_rag_vanilla.ipynb +++ /dev/null @@ -1,376 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 🤗 Welcome to AdalFlow!\n", - "## The PyTorch library to auto-optimize any LLM task pipelines\n", - "\n", - "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! 
⭐ Star us on Github ⭐\n", - "\n", - "\n", - "# Quick Links\n", - "\n", - "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", - "\n", - "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", - "\n", - "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", - "\n", - "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", - "\n", - "# Author\n", - "This notebook was created by community contributor [Ajith](https://github.com/ajithvcoder/).\n", - "\n", - "# Outline\n", - "\n", - "This is a quick introduction of what AdalFlow is capable of. We will cover:\n", - "\n", - "* How to use adalflow for rag\n", - "\n", - "Adalflow can be used in a genric manner for any api provider without worrying much about prompt, \n", - "model args and parsing results\n", - "\n", - "**Next: Try our [adalflow-rag-for-documents](\"https://colab.research.google.com/github.com/SylphAI-Inc/AdalFlow/blob/main/notebooks/tutorials/adalflow_rag_documents.ipynb\")**\n", - "\n", - "\n", - "# Installation\n", - "\n", - "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq`, and `faiss`(cpu version) from the extra packages.\n", - "\n", - " ```bash\n", - " pip install torch --index-url https://download.pytorch.org/whl/cpu\n", - " pip install sentence-transformers==3.3.1\n", - " pip install adalflow[openai,groq,faiss-cpu]\n", - " ```\n", - "2. Setup `openai` and `groq` API key in the environment variables" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Set Environment Variables\n", - "\n", - "Note: Enter your api keys in below cell #todo" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Overwriting .env\n" - ] - } - ], - "source": [ - "%%writefile .env\n", - "\n", - "OPENAI_API_KEY=\"PASTE-OPENAI_API_KEY_HERE\"\n", - "GROQ_API_KEY=\"PASTE-GROQ_API_KEY-HERE\"" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from adalflow.utils import setup_env\n", - "\n", - "# Load environment variables - Make sure to have OPENAI_API_KEY in .env file and .env is present in current folder\n", - "setup_env(\".env\")" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/workspace/ajithdev/AdalFlow/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], - "source": [ - "import os\n", - "from typing import List, Dict\n", - "import numpy as np\n", - "from sentence_transformers import SentenceTransformer\n", - "from faiss import IndexFlatL2\n", - "\n", - "from adalflow.components.model_client import GroqAPIClient, OpenAIClient\n", - "from adalflow.core.types import ModelType\n", - "from adalflow.utils import setup_env" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`AdalflowRAGPipeline` is a class that implements a Retrieval-Augmented Generation (RAG) pipeline with adalflow. 
It integrates:\n", - "\n", - "- Embedding models (e.g., Sentence Transformers) for document and query embeddings.\n", - "- FAISS for vector similarity search.\n", - "- A LLM client to generate context-aware responses using retrieved documents." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "class AdalflowRAGPipeline:\n", - " def __init__(self, \n", - " model_client = None,\n", - " model_kwargs = None,\n", - " embedding_model='all-MiniLM-L6-v2', \n", - " vector_dim=384, \n", - " top_k_retrieval=1):\n", - " \"\"\" \n", - " Initialize RAG Pipeline with embedding and retrieval components\n", - " \n", - " Args:\n", - " embedding_model (str): Sentence transformer model for embeddings\n", - " vector_dim (int): Dimension of embedding vectors\n", - " top_k_retrieval (int): Number of documents to retrieve\n", - " \"\"\"\n", - " # Initialize model client for generation\n", - " self.model_client = model_client\n", - " \n", - " # Initialize embedding model\n", - " self.embedding_model = SentenceTransformer(embedding_model)\n", - " \n", - " # Initialize FAISS index for vector similarity search\n", - " self.index = IndexFlatL2(vector_dim)\n", - " \n", - " # Store document texts and their embeddings\n", - " self.documents = []\n", - " self.document_embeddings = []\n", - " \n", - " # Retrieval parameters\n", - " self.top_k_retrieval = top_k_retrieval\n", - " \n", - " # Conversation history and context\n", - " self.conversation_history = \"\"\n", - " self.model_kwargs = model_kwargs\n", - "\n", - " def add_documents(self, documents: List[str]):\n", - " \"\"\"\n", - " Add documents to the RAG pipeline's knowledge base\n", - " \n", - " Args:\n", - " documents (List[str]): List of document texts to add\n", - " \"\"\"\n", - " for doc in documents:\n", - " # Embed document\n", - " embedding = self.embedding_model.encode(doc)\n", - " \n", - " # Add to index and document store\n", - " self.index.add(np.array([embedding]))\n", - " self.documents.append(doc)\n", - " self.document_embeddings.append(embedding)\n", - "\n", - " def retrieve_relevant_docs(self, query: str) -> List[str]:\n", - " \"\"\"\n", - " Retrieve most relevant documents for a given query\n", - " \n", - " Args:\n", - " query (str): Input query to find relevant documents\n", - " \n", - " Returns:\n", - " List[str]: Top k most relevant documents\n", - " \"\"\"\n", - " # Embed query\n", - " query_embedding = self.embedding_model.encode(query)\n", - " \n", - " # Perform similarity search\n", - " distances, indices = self.index.search(\n", - " np.array([query_embedding]), \n", - " self.top_k_retrieval\n", - " )\n", - " \n", - " # Retrieve and return top documents\n", - " return [self.documents[i] for i in indices[0]]\n", - "\n", - " def generate_response(self, query: str) -> str:\n", - " \"\"\"\n", - " Generate a response using retrieval-augmented generation\n", - " \n", - " Args:\n", - " query (str): User's input query\n", - " \n", - " Returns:\n", - " str: Generated response incorporating retrieved context\n", - " \"\"\"\n", - " # Retrieve relevant documents\n", - " retrieved_docs = self.retrieve_relevant_docs(query)\n", - " \n", - " # Construct context-aware prompt\n", - " context = \"\\n\\n\".join([f\"Context Document: {doc}\" for doc in retrieved_docs])\n", - " full_prompt = f\"\"\"\n", - " Context:\n", - " {context}\n", - " \n", - " Query: {query}\n", - " \n", - " Generate a comprehensive and informative response that:\n", - " 1. Uses the provided context documents\n", - " 2. 
Directly answers the query\n", - " 3. Incorporates relevant information from the context\n", - " \"\"\"\n", - " \n", - " # Prepare API arguments\n", - " api_kwargs = self.model_client.convert_inputs_to_api_kwargs(\n", - " input=full_prompt,\n", - " model_kwargs=self.model_kwargs,\n", - " model_type=ModelType.LLM\n", - " )\n", - " \n", - " # Call API and parse response\n", - " response = self.model_client.call(\n", - " api_kwargs=api_kwargs, \n", - " model_type=ModelType.LLM\n", - " )\n", - " response_text = self.model_client.parse_chat_completion(response)\n", - " \n", - " # Update conversation history\n", - " self.conversation_history += f\"\\nQuery: {query}\\nResponse: {response_text}\"\n", - " \n", - " return response_text\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The `run_rag_pipeline` function demonstrates how to use the AdalflowRAGPipeline for embedding documents, retrieving relevant context, and generating responses:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "def run_rag_pipeline(model_client, model_kwargs, documents, queries):\n", - " rag_pipeline = AdalflowRAGPipeline(model_client=model_client, model_kwargs=model_kwargs)\n", - "\n", - " rag_pipeline.add_documents(documents)\n", - "\n", - " # Generate responses\n", - " for query in queries:\n", - " print(f\"\\nQuery: {query}\")\n", - " response = rag_pipeline.generate_response(query)\n", - " print(f\"Response: {response}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Query: Does Ajith Kumar has any nick name ?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=78, prompt_tokens=122, total_tokens=200), raw_response='Based on the provided context documents, Ajith Kumar, also known as Ajithvcoder, has a nickname that he has given himself. 
According to the context, Ajithvcoder is his nickname that he has chosen for himself.\\n\\nTherefore, the answer to the query is:\\n\\nYes, Ajith Kumar has a nickname that he has given himself, which is Ajithvcoder.', metadata=None)\n", - "\n", - "Query: What is the ajithvcoder's favourite food?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=67, prompt_tokens=109, total_tokens=176), raw_response='Based on the provided context document, I can confidently answer the query as follows:\\n\\nAjithvcoder\\'s favourite food is Hyderabadi Panner Dum Briyani.\\n\\nThis answer is directly supported by the context document, which states: \"ajithvcoder likes Hyderabadi panner dum briyani much.\"', metadata=None)\n", - "\n", - "Query: When did ajithvcoder graduated ?\n", - "Response: GeneratorOutput(id=None, data=None, error=None, usage=CompletionUsage(completion_tokens=57, prompt_tokens=107, total_tokens=164), raw_response=\"Based on the provided context documents, we can determine that Ajith V.Coder graduated on May 2016.\\n\\nHere's a comprehensive and informative response that directly answers the query:\\n\\nAjith V.Coder graduated on May 2016, which is mentioned in the context document.\", metadata=None)\n" - ] - } - ], - "source": [ - "# setup_env()\n", - "\n", - "# ajithvcoder's statements are added so that we can validate that the LLM is generating from these lines only\n", - "documents = [\n", - " \"ajithvcoder is a good person whom the world knows as Ajith Kumar, ajithvcoder is his nick name that AjithKumar gave himself\",\n", - " \"The Eiffel Tower is a famous landmark in Paris, built in 1889 for the World's Fair.\",\n", - " \"ajithvcoder likes Hyderabadi panner dum briyani much.\",\n", - " \"The Louvre Museum in Paris is the world's largest art museum, housing thousands of works of art.\",\n", - " \"ajithvcoder has a engineering degree and he graduated on May, 2016.\"\n", - "]\n", - "\n", - "# Questions related to ajithvcoder's are added so that we can validate\n", - "# that the LLM is generating from above given lines only\n", - "queries = [\n", - " \"Does Ajith Kumar has any nick name ?\",\n", - " \"What is the ajithvcoder's favourite food?\",\n", - " \"When did ajithvcoder graduated ?\"\n", - "]\n", - "\n", - "groq_model_kwargs = {\n", - " \"model\": \"llama-3.2-1b-preview\", # Use 16k model for larger context\n", - " \"temperature\": 0.1,\n", - " \"max_tokens\": 800,\n", - "}\n", - "\n", - "openai_model_kwargs = {\n", - " \"model\": \"gpt-3.5-turbo\", # Use 16k model for larger context\n", - " \"temperature\": 0.1,\n", - " \"max_tokens\": 800,\n", - "}\n", - "\n", - "# Below example shows that adalflow can be used in a genric manner for any api provider\n", - "# without worrying about prompt and parsing results\n", - "model_client = GroqAPIClient()\n", - "run_rag_pipeline(model_client, groq_model_kwargs, documents, queries)\n", - "run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries)\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/adalflow_rag_documents.py b/tutorials/adalflow_rag_documents.py deleted file mode 
100644 index e1807b2d..00000000 --- a/tutorials/adalflow_rag_documents.py +++ /dev/null @@ -1,248 +0,0 @@ -import os -import tiktoken -from typing import List -import numpy as np -from sentence_transformers import SentenceTransformer -from faiss import IndexFlatL2 - -from adalflow.components.model_client import GroqAPIClient, OpenAIClient -from adalflow.core.types import ModelType -from adalflow.utils import setup_env - -""" -pip install torch --index-url https://download.pytorch.org/whl/cpu -pip install sentence-transformers==3.3.1 -pip install faiss-cpu==1.9.0.post1 -""" - - -class AdalflowRAGPipeline: - def __init__( - self, - model_client=None, - model_kwargs=None, - embedding_model="all-MiniLM-L6-v2", - vector_dim=384, - top_k_retrieval=3, - max_context_tokens=800, - ): - """ - Initialize RAG Pipeline for handling large text files - - Args: - embedding_model (str): Sentence transformer model for embeddings - vector_dim (int): Dimension of embedding vectors - top_k_retrieval (int): Number of documents to retrieve - max_context_tokens (int): Maximum tokens to send to LLM - """ - # Initialize model client for generation - self.model_client = model_client - - # Initialize tokenizer for precise token counting - self.tokenizer = tiktoken.get_encoding("cl100k_base") - - # Initialize embedding model - self.embedding_model = SentenceTransformer(embedding_model) - - # Initialize FAISS index for vector similarity search - self.index = IndexFlatL2(vector_dim) - - # Store document texts, embeddings, and metadata - self.documents = [] - self.document_embeddings = [] - self.document_metadata = [] - - # Retrieval and context management parameters - self.top_k_retrieval = top_k_retrieval - self.max_context_tokens = max_context_tokens - - # Model generation parameters - self.model_kwargs = model_kwargs - - def load_text_file(self, file_path: str) -> List[str]: - """ - Load a large text file and split into manageable chunks - - Args: - file_path (str): Path to the text file - - Returns: - List[str]: List of document chunks - """ - with open(file_path, "r", encoding="utf-8") as file: - # Read entire file - content = file.read() - - # Split content into chunks (e.g., 10 lines per chunk) - lines = content.split("\n") - chunks = [] - chunk_size = 10 # Adjust based on your file structure - - for i in range(0, len(lines), chunk_size): - chunk = "\n".join(lines[i : i + chunk_size]) - chunks.append(chunk) - - return chunks - - def add_documents_from_directory(self, directory_path: str): - """ - Add documents from all text files in a directory - - Args: - directory_path (str): Path to directory containing text files - """ - for filename in os.listdir(directory_path): - if filename.endswith(".txt"): - file_path = os.path.join(directory_path, filename) - document_chunks = self.load_text_file(file_path) - - for chunk in document_chunks: - # Embed document chunk - embedding = self.embedding_model.encode(chunk) - - # Add to index and document store - self.index.add(np.array([embedding])) - self.documents.append(chunk) - self.document_embeddings.append(embedding) - self.document_metadata.append( - { - "filename": filename, - "chunk_index": len(self.document_metadata), - } - ) - - def count_tokens(self, text: str) -> int: - """ - Count tokens in a given text - - Args: - text (str): Input text - - Returns: - int: Number of tokens - """ - return len(self.tokenizer.encode(text)) - - def retrieve_and_truncate_context(self, query: str) -> str: - """ - Retrieve relevant documents and truncate to fit token limit - - Args: - 
            query (str): Input query
-
-        Returns:
-            str: Concatenated context within token limit
-        """
-        # Retrieve relevant documents
-        query_embedding = self.embedding_model.encode(query)
-        distances, indices = self.index.search(
-            np.array([query_embedding]), self.top_k_retrieval
-        )
-
-        # Collect and truncate context
-        context = []
-        current_tokens = 0
-
-        for idx in indices[0]:
-            doc = self.documents[idx]
-            doc_tokens = self.count_tokens(doc)
-
-            # Check if adding this document would exceed token limit
-            if current_tokens + doc_tokens <= self.max_context_tokens:
-                context.append(doc)
-                current_tokens += doc_tokens
-            else:
-                break
-
-        return "\n\n".join(context)
-
-    def generate_response(self, query: str) -> str:
-        """
-        Generate a response using retrieval-augmented generation
-
-        Args:
-            query (str): User's input query
-
-        Returns:
-            str: Generated response incorporating retrieved context
-        """
-        # Retrieve and truncate context
-        retrieved_context = self.retrieve_and_truncate_context(query)
-
-        # Construct context-aware prompt
-        full_prompt = f"""
-        Context Documents:
-        {retrieved_context}
-
-        Query: {query}
-
-        Generate a comprehensive response that:
-        1. Directly answers the query
-        2. Incorporates relevant information from the context documents
-        3. Provides clear and concise information
-        """
-
-        # Prepare API arguments
-        api_kwargs = self.model_client.convert_inputs_to_api_kwargs(
-            input=full_prompt, model_kwargs=self.model_kwargs, model_type=ModelType.LLM
-        )
-
-        # Call API and parse response
-        response = self.model_client.call(
-            api_kwargs=api_kwargs, model_type=ModelType.LLM
-        )
-        response_text = self.model_client.parse_chat_completion(response)
-
-        return response_text
-
-
-def run_rag_pipeline(model_client, model_kwargs, documents, queries):
-
-    # Example usage of RAG pipeline
-    rag_pipeline = AdalflowRAGPipeline(
-        model_client=model_client,
-        model_kwargs=model_kwargs,
-        top_k_retrieval=2,  # Retrieve top 3 most relevant chunks
-        max_context_tokens=800,  # Limit context to 1500 tokens
-    )
-
-    # Add documents from a directory of text files
-    rag_pipeline.add_documents_from_directory(documents)
-
-    # Generate responses
-    for query in queries:
-        print(f"\nQuery: {query}")
-        response = rag_pipeline.generate_response(query)
-        print(f"Response: {response}")
-
-
-def main():
-    setup_env()
-
-    documents = "./tutorials/assets/documents"
-
-    queries = [
-        "What year was the Crystal Cavern discovered?",
-        "What is the name of the rare tree in Elmsworth?",
-        "What local legend claim that Lunaflits surrounds?",
-    ]
-
-    groq_model_kwargs = {
-        "model": "llama-3.2-1b-preview",  # Use 16k model for larger context
-        "temperature": 0.1,
-        "max_tokens": 800,
-    }
-
-    openai_model_kwargs = {
-        "model": "gpt-3.5-turbo",
-        "temperature": 0.1,
-        "max_tokens": 800,
-    }
-    # Below example shows that adalflow can be used in a genric manner for any api provider
-    # without worrying about prompt and parsing results
-    run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries)
-    run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/tutorials/adalflow_rag_vanilla.py b/tutorials/adalflow_rag_vanilla.py
deleted file mode 100644
index 36af7997..00000000
--- a/tutorials/adalflow_rag_vanilla.py
+++ /dev/null
@@ -1,188 +0,0 @@
-from typing import List
-import numpy as np
-from sentence_transformers import SentenceTransformer
-from faiss import IndexFlatL2
-
-from adalflow.components.model_client import GroqAPIClient, OpenAIClient
-from adalflow.core.types import ModelType
-from adalflow.utils import setup_env
-
-"""
-pip install torch --index-url https://download.pytorch.org/whl/cpu
-pip install sentence-transformers==3.3.1
-pip install faiss-cpu==1.9.0.post1
-"""
-
-
-class AdalflowRAGPipeline:
-    def __init__(
-        self,
-        model_client=None,
-        model_kwargs=None,
-        embedding_model="all-MiniLM-L6-v2",
-        vector_dim=384,
-        top_k_retrieval=1,
-    ):
-        """
-        Initialize RAG Pipeline with embedding and retrieval components
-
-        Args:
-            embedding_model (str): Sentence transformer model for embeddings
-            vector_dim (int): Dimension of embedding vectors
-            top_k_retrieval (int): Number of documents to retrieve
-        """
-        # Initialize model client for generation
-        self.model_client = model_client
-
-        # Initialize embedding model
-        self.embedding_model = SentenceTransformer(embedding_model)
-
-        # Initialize FAISS index for vector similarity search
-        self.index = IndexFlatL2(vector_dim)
-
-        # Store document texts and their embeddings
-        self.documents = []
-        self.document_embeddings = []
-
-        # Retrieval parameters
-        self.top_k_retrieval = top_k_retrieval
-
-        # Conversation history and context
-        self.conversation_history = ""
-        self.model_kwargs = model_kwargs
-
-    def add_documents(self, documents: List[str]):
-        """
-        Add documents to the RAG pipeline's knowledge base
-
-        Args:
-            documents (List[str]): List of document texts to add
-        """
-        for doc in documents:
-            # Embed document
-            embedding = self.embedding_model.encode(doc)
-
-            # Add to index and document store
-            self.index.add(np.array([embedding]))
-            self.documents.append(doc)
-            self.document_embeddings.append(embedding)
-
-    def retrieve_relevant_docs(self, query: str) -> List[str]:
-        """
-        Retrieve most relevant documents for a given query
-
-        Args:
-            query (str): Input query to find relevant documents
-
-        Returns:
-            List[str]: Top k most relevant documents
-        """
-        # Embed query
-        query_embedding = self.embedding_model.encode(query)
-
-        # Perform similarity search
-        distances, indices = self.index.search(
-            np.array([query_embedding]), self.top_k_retrieval
-        )
-
-        # Retrieve and return top documents
-        return [self.documents[i] for i in indices[0]]
-
-    def generate_response(self, query: str) -> str:
-        """
-        Generate a response using retrieval-augmented generation
-
-        Args:
-            query (str): User's input query
-
-        Returns:
-            str: Generated response incorporating retrieved context
-        """
-        # Retrieve relevant documents
-        retrieved_docs = self.retrieve_relevant_docs(query)
-
-        # Construct context-aware prompt
-        context = "\n\n".join([f"Context Document: {doc}" for doc in retrieved_docs])
-        full_prompt = f"""
-        Context:
-        {context}
-
-        Query: {query}
-
-        Generate a comprehensive and informative response that:
-        1. Uses the provided context documents
-        2. Directly answers the query
-        3. Incorporates relevant information from the context
-        """
-
-        # Prepare API arguments
-        api_kwargs = self.model_client.convert_inputs_to_api_kwargs(
-            input=full_prompt, model_kwargs=self.model_kwargs, model_type=ModelType.LLM
-        )
-
-        # Call API and parse response
-        response = self.model_client.call(
-            api_kwargs=api_kwargs, model_type=ModelType.LLM
-        )
-        response_text = self.model_client.parse_chat_completion(response)
-
-        # Update conversation history
-        self.conversation_history += f"\nQuery: {query}\nResponse: {response_text}"
-
-        return response_text
-
-
-def run_rag_pipeline(model_client, model_kwargs, documents, queries):
-    rag_pipeline = AdalflowRAGPipeline(
-        model_client=model_client, model_kwargs=model_kwargs
-    )
-
-    rag_pipeline.add_documents(documents)
-
-    # Generate responses
-    for query in queries:
-        print(f"\nQuery: {query}")
-        response = rag_pipeline.generate_response(query)
-        print(f"Response: {response}")
-
-
-def main():
-    setup_env()
-
-    # ajithvcoder's statements are added so that we can validate that the LLM is generating from these lines only
-    documents = [
-        "ajithvcoder is a good person whom the world knows as Ajith Kumar, ajithvcoder is his nick name that AjithKumar gave himself",
-        "The Eiffel Tower is a famous landmark in Paris, built in 1889 for the World's Fair.",
-        "ajithvcoder likes Hyderabadi panner dum briyani much.",
-        "The Louvre Museum in Paris is the world's largest art museum, housing thousands of works of art.",
-        "ajithvcoder has a engineering degree and he graduated on May, 2016.",
-    ]
-
-    # Questions related to ajithvcoder's are added so that we can validate
-    # that the LLM is generating from above given lines only
-    queries = [
-        "Does Ajith Kumar has any nick name ?",
-        "What is the ajithvcoder's favourite food?",
-        "When did ajithvcoder graduated ?",
-    ]
-
-    groq_model_kwargs = {
-        "model": "llama-3.2-1b-preview",  # Use 16k model for larger context
-        "temperature": 0.1,
-        "max_tokens": 800,
-    }
-
-    openai_model_kwargs = {
-        "model": "gpt-3.5-turbo",  # Use 16k model for larger context
-        "temperature": 0.1,
-        "max_tokens": 800,
-    }
-
-    # Below example shows that adalflow can be used in a genric manner for any api provider
-    # without worrying about prompt and parsing results
-    run_rag_pipeline(GroqAPIClient(), groq_model_kwargs, documents, queries)
-    run_rag_pipeline(OpenAIClient(), openai_model_kwargs, documents, queries)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt b/tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt
deleted file mode 100644
index 324f4b41..00000000
--- a/tutorials/assets/documents/The Bioluminescent Guardians of the Desert Canyon.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-In a remote desert canyon, scientists discovered a colony of glowing insects called "Lunaflits." These
-creatures produce bioluminescence to attract mates and ward off predators. Unlike fireflies, Lunaflits
-emit a constant, soothing green light that illuminates the canyon at night. The canyon walls are covered
-with a rare moss that reflects their light, creating an ethereal glow. Researchers have found that Lunaflits
-communicate through rhythmic light pulses, forming intricate patterns. Local legends claim these insects
-are guardians of ancient treasure buried deep within the canyon.
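Both deleted tutorials above drive the generation step through the same provider-agnostic client interface: convert_inputs_to_api_kwargs builds the request, call sends it with ModelType.LLM, and parse_chat_completion extracts the text. A minimal sketch of that pattern on its own, assuming the AdalFlow clients imported in those files; the prompt and model names here are illustrative only:

from adalflow.components.model_client import GroqAPIClient, OpenAIClient
from adalflow.core.types import ModelType
from adalflow.utils import setup_env


def generate(model_client, model_kwargs, prompt):
    # Build provider-specific API arguments from a plain prompt.
    api_kwargs = model_client.convert_inputs_to_api_kwargs(
        input=prompt, model_kwargs=model_kwargs, model_type=ModelType.LLM
    )
    # Call the provider and parse the chat completion back into text.
    response = model_client.call(api_kwargs=api_kwargs, model_type=ModelType.LLM)
    return model_client.parse_chat_completion(response)


if __name__ == "__main__":
    setup_env()  # load API keys from the environment / .env
    question = "Answer in one sentence: what does RAG stand for?"  # illustrative
    # The same helper works for either provider without prompt- or parsing-specific code.
    print(generate(GroqAPIClient(), {"model": "llama-3.2-1b-preview", "max_tokens": 100}, question))
    print(generate(OpenAIClient(), {"model": "gpt-3.5-turbo", "max_tokens": 100}, question))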
diff --git a/tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt b/tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt deleted file mode 100644 index 0b4929ab..00000000 --- a/tutorials/assets/documents/The Enigmatic Crystal Cavern of Lake Aurora.txt +++ /dev/null @@ -1,6 +0,0 @@ -Hidden beneath the icy waters of Lake Aurora lies the Crystal Cavern, a natural wonder discovered by divers -in 1987. The cavern is adorned with shimmering quartz formations that refract sunlight into a spectrum of -colors. It is said that the cavern once served as a sanctuary for an ancient civilization that revered the -crystals as conduits to the spirit world. Explorers have recovered artifacts carved with intricate symbols, -suggesting a deep connection to celestial events. However, accessing the cavern is perilous due to the lake's -freezing temperatures and strong currents. diff --git a/tutorials/assets/documents/The Legend of the Moonshade Willow.txt b/tutorials/assets/documents/The Legend of the Moonshade Willow.txt deleted file mode 100644 index 36e342b3..00000000 --- a/tutorials/assets/documents/The Legend of the Moonshade Willow.txt +++ /dev/null @@ -1,6 +0,0 @@ -In the mystical village of Elmsworth, a rare tree known as the "Moonshade Willow" blooms once every seven -years. Its blossoms emit a soft glow, and villagers believe that meditating under its branches brings vivid -dreams of the future. The tree's bark is said to contain a secret resin used in ancient healing rituals. Elders -claim the Moonshade Willow was a gift from a goddess to protect the village. Despite its sacred status, -researchers have discovered that the tree thrives only in Elmsworth's unique soil, making it impossible to -cultivate elsewhere. From f093e9d9a0791743d59744261df4aefd591bae36 Mon Sep 17 00:00:00 2001 From: Li Yin Date: Sat, 14 Dec 2024 14:44:06 -0800 Subject: [PATCH 40/40] add change log --- adalflow/CHANGELOG.md | 8 +- .../components/model_client/bedrock_client.py | 4 + notebooks/adalflow_colab_template.ipynb | 4 +- notebooks/evaluation/adalflow_llm_eval.ipynb | 14 +- ...lflow_object_count_auto_optimization.ipynb | 143 +- ...adalflow_classification_optimization.ipynb | 912 ++++---- notebooks/tutorials/adalflow_component.ipynb | 1869 +++++++++-------- .../tutorials/adalflow_dataclasses.ipynb | 152 +- .../tutorials/adalflow_function_calls.ipynb | 1407 +++++++------ notebooks/tutorials/adalflow_logger.ipynb | 466 ++-- .../tutorials/adalflow_rag_optimization.ipynb | 969 ++++----- .../tutorials/adalflow_rag_playbook.ipynb | 1014 ++++----- .../tutorials/adalflow_text_splitter.ipynb | 24 +- notebooks/tutorials/adalflow_tracing.ipynb | 355 ++-- tutorials/database.ipynb | 128 +- tutorials/dataclass.ipynb | 49 +- tutorials/embedder.ipynb | 7 +- tutorials/generator.ipynb | 10 +- tutorials/model_client.ipynb | 19 +- tutorials/rag.ipynb | 13 +- tutorials/react_note.ipynb | 6 +- tutorials/retriever.ipynb | 227 +- tutorials/tools.ipynb | 491 +++-- use_cases/agent/react_agent.ipynb | 140 +- use_cases/generator/intermediate.ipynb | 18 +- use_cases/question_answering/chatbot.ipynb | 15 +- use_cases/question_answering/simple_qa.ipynb | 11 +- 27 files changed, 4359 insertions(+), 4116 deletions(-) diff --git a/adalflow/CHANGELOG.md b/adalflow/CHANGELOG.md index e5b806f8..f87773ab 100644 --- a/adalflow/CHANGELOG.md +++ b/adalflow/CHANGELOG.md @@ -1,4 +1,10 @@ - +## [0.2.7] - TO Be Released +### Added +- `Memory` is completed with `call` and `add_dialog_turn` methods. 
+- Integrated `LanceDB` in the `Retriever` +### Improved +- `BedrockAPIClient` added more details on setup, yet it is still in experimental stage. +- `AzureAPIClient` added more details on setup, yet it is still in experimental stage. ## [0.2.6] - 2024-11-25 ### Improved - Add default `max_tokens=512` to the `AnthropicAPIClient` to avoid the error when the user does not provide the `max_tokens` in the prompt. diff --git a/adalflow/adalflow/components/model_client/bedrock_client.py b/adalflow/adalflow/components/model_client/bedrock_client.py index d25b48bc..b10098bb 100644 --- a/adalflow/adalflow/components/model_client/bedrock_client.py +++ b/adalflow/adalflow/components/model_client/bedrock_client.py @@ -39,6 +39,10 @@ def get_first_message_content(completion: Dict) -> str: class BedrockAPIClient(ModelClient): __doc__ = r"""A component wrapper for the Bedrock API client. + Note: + + This api is in experimental and is not fully tested and validated yet. + Support: 1. AWS Titan 2. Claude diff --git a/notebooks/adalflow_colab_template.ipynb b/notebooks/adalflow_colab_template.ipynb index 191bbf08..39715816 100644 --- a/notebooks/adalflow_colab_template.ipynb +++ b/notebooks/adalflow_colab_template.ipynb @@ -87,8 +87,8 @@ "\n", "\n", "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "os.environ['GROQ_API_KEY'] = groq_api_key\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "os.environ[\"GROQ_API_KEY\"] = groq_api_key\n", "\n", "print(\"API keys have been set.\")" ] diff --git a/notebooks/evaluation/adalflow_llm_eval.ipynb b/notebooks/evaluation/adalflow_llm_eval.ipynb index 5e903978..448215e3 100644 --- a/notebooks/evaluation/adalflow_llm_eval.ipynb +++ b/notebooks/evaluation/adalflow_llm_eval.ipynb @@ -95,7 +95,7 @@ "\n", "\n", "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", "\n", "print(\"API keys have been set.\")" ] @@ -301,11 +301,11 @@ "source": [ "# without questions, and with customized judgement query\n", "\n", + "\n", "def compute_llm_as_judge_wo_questions():\n", " from adalflow.eval.llm_as_judge import LLMasJudge, DefaultLLMJudge\n", " from adalflow.components.model_client import OpenAIClient\n", "\n", - "\n", " llm_judge = DefaultLLMJudge(\n", " model_client=OpenAIClient(),\n", " model_kwargs={\n", @@ -413,7 +413,7 @@ " eval_rslt = llm_evaluator.compute(\n", " questions=questions, gt_answers=gt_answers, pred_answers=pred_answers\n", " )\n", - " print(eval_rslt)\n" + " print(eval_rslt)" ] }, { @@ -569,8 +569,12 @@ } ], "source": [ - "source=\"Paul Merson has restarted his row with Andros Townsend after the Tottenham midfielder was brought on with only seven minutes remaining in his team 's 0-0 draw with Burnley on Sunday . 'Just been watching the game , did you miss the coach ? # RubberDub # 7minutes , ' Merson put on Twitter . Merson initially angered Townsend for writing in his Sky Sports column that 'if Andros Townsend can get in ( the England team ) then it opens it up to anybody . ' Paul Merson had another dig at Andros Townsend after his appearance for Tottenham against Burnley Townsend was brought on in the 83rd minute for Tottenham as they drew 0-0 against Burnley Andros Townsend scores England 's equaliser in their 1-1 friendly draw with Italy in Turin on Tuesday night The former Arsenal man was proven wrong when Townsend hit a stunning equaliser for England against Italy and he duly admitted his mistake . 
'It 's not as though I was watching hoping he would n't score for England , I 'm genuinely pleased for him and fair play to him \\u00e2\\u20ac\\u201c it was a great goal , ' Merson said . 'It 's just a matter of opinion , and my opinion was that he got pulled off after half an hour at Manchester United in front of Roy Hodgson , so he should n't have been in the squad . 'When I 'm wrong , I hold my hands up . I do n't have a problem with doing that - I 'll always be the first to admit when I 'm wrong . ' Townsend hit back at Merson on Twitter after scoring for England against Italy Sky Sports pundit Merson ( centre ) criticised Townsend 's call-up to the England squad last week Townsend hit back at Merson after netting for England in Turin on Wednesday , saying 'Not bad for a player that should be 'nowhere near the squad ' ay @ PaulMerse ? ' Any bad feeling between the pair seemed to have passed but Merson was unable to resist having another dig at Townsend after Tottenham drew at Turf Moor .\",\n", - "summary=\"Paul merson was brought on with only seven minutes remaining in his team 's 0-0 draw with burnley . Andros townsend scored the tottenham midfielder in the 89th minute . Paul merson had another dig at andros townsend after his appearance . The midfielder had been brought on to the england squad last week . Click here for all the latest arsenal news news .\",\n", + "source = (\n", + " \"Paul Merson has restarted his row with Andros Townsend after the Tottenham midfielder was brought on with only seven minutes remaining in his team 's 0-0 draw with Burnley on Sunday . 'Just been watching the game , did you miss the coach ? # RubberDub # 7minutes , ' Merson put on Twitter . Merson initially angered Townsend for writing in his Sky Sports column that 'if Andros Townsend can get in ( the England team ) then it opens it up to anybody . ' Paul Merson had another dig at Andros Townsend after his appearance for Tottenham against Burnley Townsend was brought on in the 83rd minute for Tottenham as they drew 0-0 against Burnley Andros Townsend scores England 's equaliser in their 1-1 friendly draw with Italy in Turin on Tuesday night The former Arsenal man was proven wrong when Townsend hit a stunning equaliser for England against Italy and he duly admitted his mistake . 'It 's not as though I was watching hoping he would n't score for England , I 'm genuinely pleased for him and fair play to him \\u00e2\\u20ac\\u201c it was a great goal , ' Merson said . 'It 's just a matter of opinion , and my opinion was that he got pulled off after half an hour at Manchester United in front of Roy Hodgson , so he should n't have been in the squad . 'When I 'm wrong , I hold my hands up . I do n't have a problem with doing that - I 'll always be the first to admit when I 'm wrong . ' Townsend hit back at Merson on Twitter after scoring for England against Italy Sky Sports pundit Merson ( centre ) criticised Townsend 's call-up to the England squad last week Townsend hit back at Merson after netting for England in Turin on Wednesday , saying 'Not bad for a player that should be 'nowhere near the squad ' ay @ PaulMerse ? ' Any bad feeling between the pair seemed to have passed but Merson was unable to resist having another dig at Townsend after Tottenham drew at Turf Moor .\",\n", + ")\n", + "summary = (\n", + " \"Paul merson was brought on with only seven minutes remaining in his team 's 0-0 draw with burnley . Andros townsend scored the tottenham midfielder in the 89th minute . 
Paul merson had another dig at andros townsend after his appearance . The midfielder had been brought on to the england squad last week . Click here for all the latest arsenal news news .\",\n", + ")\n", "\n", "compute_g_eval_summarization(source=source, summary=summary)" ] diff --git a/notebooks/qas/adalflow_object_count_auto_optimization.ipynb b/notebooks/qas/adalflow_object_count_auto_optimization.ipynb index ac7e3cbf..9308ea7f 100644 --- a/notebooks/qas/adalflow_object_count_auto_optimization.ipynb +++ b/notebooks/qas/adalflow_object_count_auto_optimization.ipynb @@ -137,12 +137,14 @@ "\n", "# Prompt user to enter their API keys securely\n", "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "groq_api_key = getpass(\"Please enter your GROQ API key, simplly press Enter if you don't have one: \")\n", + "groq_api_key = getpass(\n", + " \"Please enter your GROQ API key, simplly press Enter if you don't have one: \"\n", + ")\n", "\n", "\n", "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "os.environ['GROQ_API_KEY'] = groq_api_key\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "os.environ[\"GROQ_API_KEY\"] = groq_api_key\n", "\n", "print(\"API keys have been set.\")" ] @@ -209,6 +211,7 @@ "\n", "\"\"\"\n", "\n", + "\n", "class ObjectCountTaskPipeline(adal.Component):\n", " def __init__(self, model_client: adal.ModelClient, model_kwargs: Dict):\n", " super().__init__()\n", @@ -242,9 +245,7 @@ " self, question: str, id: str = None\n", " ) -> Union[adal.GeneratorOutput, adal.Parameter]:\n", " output = self.llm_counter(prompt_kwargs={\"input_str\": question}, id=id)\n", - " return output\n", - "\n", - "\n" + " return output" ] }, { @@ -329,44 +330,42 @@ "from adalflow.components.model_client.groq_client import GroqAPIClient\n", "\n", "\n", - "if len(os.environ['OPENAI_API_KEY']) > 1:\n", - " gpt_3_model = {\n", - " \"model_client\": OpenAIClient(),\n", - " \"model_kwargs\": {\n", - " \"model\": \"gpt-3.5-turbo\",\n", - " \"max_tokens\": 2000,\n", - " \"temperature\": 0.0,\n", - " \"top_p\": 0.99,\n", - " \"frequency_penalty\": 0,\n", - " \"presence_penalty\": 0,\n", - " \"stop\": None,\n", - " },\n", - " }\n", - " gpt_4o_model = {\n", - " \"model_client\": OpenAIClient(),\n", - " \"model_kwargs\": {\n", - " \"model\": \"gpt-4o\",\n", - " \"max_tokens\": 4000,\n", - " \"temperature\": 0.0,\n", - " \"top_p\": 0.99,\n", - " \"frequency_penalty\": 0,\n", - " \"presence_penalty\": 0,\n", - " \"stop\": None,\n", - " },\n", - " }\n", + "if len(os.environ[\"OPENAI_API_KEY\"]) > 1:\n", + " gpt_3_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"max_tokens\": 2000,\n", + " \"temperature\": 0.0,\n", + " \"top_p\": 0.99,\n", + " \"frequency_penalty\": 0,\n", + " \"presence_penalty\": 0,\n", + " \"stop\": None,\n", + " },\n", + " }\n", + " gpt_4o_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-4o\",\n", + " \"max_tokens\": 4000,\n", + " \"temperature\": 0.0,\n", + " \"top_p\": 0.99,\n", + " \"frequency_penalty\": 0,\n", + " \"presence_penalty\": 0,\n", + " \"stop\": None,\n", + " },\n", + " }\n", "\n", - "if len(os.environ['GROQ_API_KEY']) > 1:\n", - " llama_3_1_model ={\n", - " \"model_client\": GroqAPIClient(),\n", - " \"model_kwargs\": {\n", - " \"model\": \"llama-3.1-8b-instant\"\n", - " }\n", - " }\n", + "if len(os.environ[\"GROQ_API_KEY\"]) > 1:\n", + " llama_3_1_model = {\n", + " 
\"model_client\": GroqAPIClient(),\n", + " \"model_kwargs\": {\"model\": \"llama-3.1-8b-instant\"},\n", + " }\n", "\n", "\n", "question = \"I have a flute, a piano, a trombone, four stoves, a violin, an accordion, a clarinet, a drum, two lamps, and a trumpet. How many musical instruments do I have?\"\n", "task_pipeline = ObjectCountTaskPipeline(**gpt_3_model)\n", - "print(task_pipeline)\n" + "print(task_pipeline)" ] }, { @@ -467,6 +466,7 @@ "from adalflow.datasets.big_bench_hard import BigBenchHard\n", "from adalflow.utils.data import subset_dataset\n", "\n", + "\n", "def load_datasets(max_samples: int = None):\n", " \"\"\"Load the dataset\"\"\"\n", " train_data = BigBenchHard(split=\"train\")\n", @@ -479,7 +479,7 @@ " val_data = subset_dataset(val_data, max_samples)\n", " test_data = subset_dataset(test_data, max_samples)\n", "\n", - " return train_data, val_data, test_data\n" + " return train_data, val_data, test_data" ] }, { @@ -583,11 +583,11 @@ " def prepare_task(self, sample: Example):\n", " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", "\n", - " def prepare_eval(\n", - " self, sample: Example, y_pred: adal.GeneratorOutput\n", - " ) -> float:\n", + " def prepare_eval(self, sample: Example, y_pred: adal.GeneratorOutput) -> float:\n", " y_label = -1\n", - " if (y_pred is not None and y_pred.data is not None): # if y_pred and y_pred.data: might introduce bug when the data is 0\n", + " if (\n", + " y_pred is not None and y_pred.data is not None\n", + " ): # if y_pred and y_pred.data: might introduce bug when the data is 0\n", " y_label = y_pred.data\n", " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.answer}" ] @@ -820,7 +820,7 @@ "from adalflow.datasets.types import Example\n", "\n", "\n", - "class ObjectCountAdalComponent(adal.AdalComponent):# noqa: F811\n", + "class ObjectCountAdalComponent(adal.AdalComponent): # noqa: F811\n", " def __init__(\n", " self,\n", " model_client: adal.ModelClient,\n", @@ -844,12 +844,11 @@ " def prepare_task(self, sample: Example):\n", " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", "\n", - "\n", - " def prepare_eval(\n", - " self, sample: Example, y_pred: adal.GeneratorOutput\n", - " ) -> float:\n", + " def prepare_eval(self, sample: Example, y_pred: adal.GeneratorOutput) -> float:\n", " y_label = -1\n", - " if (y_pred is not None and y_pred.data is not None): # if y_pred and y_pred.data: might introduce bug when the data is 0\n", + " if (\n", + " y_pred is not None and y_pred.data is not None\n", + " ): # if y_pred and y_pred.data: might introduce bug when the data is 0\n", " y_label = y_pred.data\n", " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.answer}\n", "\n", @@ -891,7 +890,7 @@ " **gpt_3_model,\n", " teacher_model_config=gpt_4o_model,\n", " text_optimizer_model_config=gpt_4o_model,\n", - " backward_engine_model_config=gpt_4o_model\n", + " backward_engine_model_config=gpt_4o_model,\n", " )\n", " print(adal_component)\n", " trainer = adal.Trainer(\n", @@ -916,7 +915,7 @@ " test_dataset=test_dataset,\n", " debug=debug,\n", " resume_from_ckpt=resume_from_ckpt,\n", - " )\n" + " )" ] }, { @@ -3255,10 +3254,14 @@ } ], "source": [ - "train(debug=False, max_steps=12, strategy=\"constrained\",\n", - " raw_shots=0, bootstrap_shots=1,\n", - " exclude_input_fields_from_bootstrap_demos=True\n", - " )" + "train(\n", + " debug=False,\n", + " max_steps=12,\n", + " strategy=\"constrained\",\n", + " raw_shots=0,\n", + " bootstrap_shots=1,\n", + " 
exclude_input_fields_from_bootstrap_demos=True,\n", + ")" ] }, { @@ -6015,13 +6018,17 @@ } ], "source": [ - "\n", "ckpt_path = \"/content/adalflow/ckpt/ObjectCountAdalComponent/constrained_max_steps_12_4e8a1_run_1.json\"\n", "\n", - "train(debug=False, max_steps=12, strategy=\"constrained\",\n", - " raw_shots=0, bootstrap_shots=1,\n", - " resume_from_ckpt=ckpt_path,\n", - " exclude_input_fields_from_bootstrap_demos=True)" + "train(\n", + " debug=False,\n", + " max_steps=12,\n", + " strategy=\"constrained\",\n", + " raw_shots=0,\n", + " bootstrap_shots=1,\n", + " resume_from_ckpt=ckpt_path,\n", + " exclude_input_fields_from_bootstrap_demos=True,\n", + ")" ] }, { @@ -8038,11 +8045,15 @@ } ], "source": [ - "\n", - "train(debug=False, max_steps=12, strategy=\"random\",\n", - " raw_shots=0, bootstrap_shots=1,\n", - " resume_from_ckpt=ckpt_path,\n", - " exclude_input_fields_from_bootstrap_demos=False)" + "train(\n", + " debug=False,\n", + " max_steps=12,\n", + " strategy=\"random\",\n", + " raw_shots=0,\n", + " bootstrap_shots=1,\n", + " resume_from_ckpt=ckpt_path,\n", + " exclude_input_fields_from_bootstrap_demos=False,\n", + ")" ] }, { diff --git a/notebooks/tutorials/adalflow_classification_optimization.ipynb b/notebooks/tutorials/adalflow_classification_optimization.ipynb index 0afb97df..c6bddc7e 100644 --- a/notebooks/tutorials/adalflow_classification_optimization.ipynb +++ b/notebooks/tutorials/adalflow_classification_optimization.ipynb @@ -1,463 +1,461 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "provenance": [] - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] }, - "cells": [ - { - "cell_type": "markdown", - "source": [ - "# 🤗 Welcome to AdalFlow!\n", - "## The PyTorch library to auto-optimize any LLM task pipelines\n", - "\n", - "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! 
⭐ Star us on Github ⭐\n", - "\n", - "\n", - "# Quick Links\n", - "\n", - "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", - "\n", - "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", - "\n", - "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", - "\n", - "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", - "\n", - "## 📖 Outline\n", - "\n", - "This is the code for a classification optimization tutorial ![image.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA+gAAAJYCAIAAAB+fFtyAAAAAXNSR0IArs4c6QAAAERlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAD6KADAAQAAAABAAACWAAAAADDsFQWAABAAElEQVR4AeydB5gURRqGe5clLJJUMnqIiCiKYBbBnOMZThEDYMCcc1bMeurpKYZTDHcqYsSM2RMFwawcZhEUliRZ4rJ772xJ0fSEnZ2Znunu+ebhWaqrq6v+eqt75qu//64uqa6udvQRAREQAREQAREQAREQAREINoHSYJsn60RABERABERABERABERABGIEJNx1HoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACOSYwC+//FJSUvLII4/kuN5wVrdzzSdN2+F29dVXp1k4V8WyHK/33nsPs/mbK3tsPf7VbJtQok4E/D4/PRfL9OnT//a3v6299tq0e8cdd/h6Pqy33noDBw6sEw0VFoGCEJBwLwh2NZohAb6+U3wykA6LFi1CJ6V54Kuvvkrr7du3r6qqyrADITnM/EA+88wzHntPP/10CHgy87+J0Dz22GM7d+7cqFGjtm3b7rjjjldddVX+zYhvccKECZxOmBe/K1c5v//++wUXXNC1a1f6vtZaa+21114vv/xynSp/4okn0EB1OiQ/he+5554wTvYY8RRfSihR6KEIbZmGDRtuuOGGV1555ZIlS9xgbQF34uSTT3aX4cI85JBDOOcbNGjQunXrAw444LnnnnMXiE//9NNPRx55JIXLy8u7dOly2WWXxZdx53zxxRdHH330uuuui52cYLvvvvvDDz+8YsUKd5m8pc8555zXX3/9kksu+c9//rP33nvnqt3Ro0czanPnzs1VhapHBPJJoCyfjaktEciSAF/ftoZ///vfb775pjtn4403tnvTTCDcBw8eTGHz+5r6qMcffxyvDLLsnXfe4fcsdeFi3tuxY8fFixfXr1/fDwg//vjj1ltvjQo57rjjGI6KiorPPvvs5ptvNuPoR4vp14lwxwzOJQyzR73xxhs2nWXiu+++22233WbOnMm8ZauttkJ5cE6i3s4///y///3vaVaOcB8/fvzZZ59ty2c5XkycGG6kpK0wswTCvWXLlm6vZ65qzsyeNI9CSW+wwQam8MKFC0855ZSDDz6YTJPTpk0bk0AHP/jgg6TnzZv3wgsvXHvttUhqhs/sNX/32GOP/v37u3OQ+HaT2ek111yD+D7ppJMYMqZwuBIOPfRQKkGa22LuBCqcs7FDhw7nnXcefuvJkyf/+uuv7gKeNBYyVcDmY445hoYWLFjw9ttvH3/88Vxll156qaewH5uei4Vv2r/+9a+c3qYtaOTkTEO4c51yprVo0cL2gourtFSuTMtDieASkHAP7tjIsngCuIJs5kcffYRwd+fYXX4k/vjjD35ub7zxRvxP/FLmX7hjwBprrOFH13JeJy5D/ME5r9ZU+I9//AN5hCJBu9gmZsyYYdNBS2SvaE2Pli9fTtjAnDlz3n///W233dZk4pI86qijbr31VnR83759M+t7luOF3PFpuP2rOTNQCY/arOZjds2aNQvhTkb891JZWZnNPPXUU7fffvthw4bdfvvtVtlTA8LUlvG0xe0vVDsnAPMuOyXm3gsOaU4MT2GzyY1B9PdGG230
7rvvMtFNWMadyTcqqr1Xr17MB5o2bWp2McH75JNPmOm5S/qX9lwsXNdube3r+cDMyr9+qWYRyCWBan1EIJwETjvtNK4Eazs3c5F03bp14/uX+8Innnji7Nmz7d6PP/54zz33xOeEwsAbisOSXRMnTvRcS/i07CGeBK59fjbwPOHcbdasGY4fdwE2ORYfFa1zIxuXG45hUwDDiEzYdNNN2YVDkdgGjGGXaZ1pgLse7LE2kGDzf//7X79+/fj16tmzJyW//PLLAQMGdOrUidr4yacjaAV3Db/99huu6Hbt2vETSE/5JV66dCm+PapCJbhLfvjhh2SiA9yZJs0vPbuefvppzy4Pc9xjvXv3bt68OTMKNAd3tE15T9cwmAIYhvOMBBDw/1VWVtrK6QJ6Ba1AVXgcEeW07iFjCwOQftnNhAmUR58+fRo3btykSZN9990X2eEu9vzzz2+yySYA5C+RBpjHHMAWSH0iUXK//fYbNWoUXn9qYCAeffRRcywGY7b7A0Z27VTzMWUYiyuuuGKLLbbgFMI8jMSnaHaZvxxuTwB3Pml0HntRb558/O6cHugzk2/G7sknn2Q4OENoBZc8rlazF1vcFpqOJxyvSZMm0VPGi9iwu+++m8O/+uqrXXbZhQr/8pe/MHe1ZpgWTWfjIdAcjZrCDz30EDW0atWKk5P7Y7jYbSVY4jbMHOKu2ZR86qmnoMdVzLXMjIWTytZQ62lmS9rEkCFD+MbAGK4XxDSTIrsLAzg9uPrwWKN6gcCFb/cmS3AzhF7Ej6CxzX2U8SLj+rWZHMj1ZTc9CcaXwJX58+d78lNsvvbaa9TJtUAZpv3uKy7hUcSiMLtg3BPuNZnu3nHvkVkKFz7DgW1MKjiR7LHLli0jHIV7EVwm7OWLgq8Ls5dvUbzd3AeAPN+WBx54oD0Q7HwoFn8ikRl/PjDZ2GeffTj/OS27d+/ON61pIsX3pPlepSP2Y1rnDGSYzOH85TuTHq255pqMPvNkAtLsLmPG8OHDr7vuOnpBB3fdddcffvjBFlBCBHwlII+7vXiVCDcBbh8TIIuQPfPMM/kiRmp8/vnnaFO8U7htUO3IhYsvvpiveH5vTGAoOffee6/71jausmQUUCpoDn5mjjjiCOp56aWXDjvsMFMYqbf//vtzT5ldZ511FveXuRWAWCQImwLcaMYwfl1OOOEEfjvRfPzY4B9N1pAnn1aYD9xwww18EbCLmn/++We6iSWoin/961/8pUKcpuydOnXqNttsg5Jj3sIv/ZQpU3DUEQ60/vrr88NJF3DQ2vrZRCsjpm1OnRK0S68hhpTkp4uJipkJJKwERAhufv/wDb/11lu33XYbcCBPYfyCKMtx48axic3c1uDnM2ElJpPfV2pA7/JjmbAYUyxqoDmUFn1niNHHnAzIfcqjHoguQK5x84RgA0ius8467npSnEimGD3lF51hpRWUKBJkyy23ROQR18G5989//pOgAhO1FR+7hfAiGoGZ2KBBgzhPhg4dip30nVmZ24aEaU458j2hFOQw22EQmT9gmI3ZuP766zklLrroIk5+1Aw3iJgOIUEIcSZUA73LLJdjmdgkbIvx4oylR7fccgvnCc82oOA5Fq1MEMh9992HGbhmmbd4DucQd/QaKvDyyy9nIm2KMRaAQqghEOkOWpnRN7NBjDzjjDOwxwRhu/3QtglzgTNlYux4bPHOO+/klGNkuahNmRSnma3EJpCVxEtAhhOPMAlsY0ZtvjFMGXQ8Wpb+Hn744VxHwEQagsXWkE2CbyEORxe6KyHqnUmsO4cJHuoWUfjtt98yIbeOcHeZZGkuE3ZxbfJt8+mnn1IPDgUmS8jo+EO4UvgGY/iYlcXvTZgDLiYefOlxBdEdADLJIVoMDU158DJMfOnxjcRpj9uekDZigdjFBci3B8PNJcn5yXcaE0tzedqGzInEHYP48CFbhgP5CmLSxbcu34fffPMN8po0BVJ8TzKg33//PdNgLgGcCBTmh8DWaRKcXdwSgQlXNFNELi5OWs4BANqSN910E64cJmBcUFwmXBpjx461e5UQAR8J+DotUOUi4B8Bt/cXNcxFgsKwzY0cOdLm4GElbfzctoBJJPOQeYrxPY7UeOCBB0w+3+lIJVsG9Ub9Hn82ioQC6Et28e1vC5Mwu5hdsAvHknsXOdZdZzxDiDx3AX5L3JvGC0vshMlETvFb4umpae7++++ncn7bTEn8YfxouT1M7mqNSym1x90oPwC6DzRpT9eMEHe7ijfffHPErin87LPPYph1laG9jCL3kLGtMCMy9/0Ru/xIjxgxAm+i3YsaRsYhi23OtGnTkLY2h6P4pWduYwqYmFrjeCYn9YlEAUpirQWO7EAYcQPB1AYx9kLPbJq/1onIJjM3nO52L9IQhYogszkcbk8Am2kSWE5HPJlmk3OPA1988UU2zdjhCLTeWbzU7EXmmsL40W1/TU7C8TJzRQpgJMCZBuDFN+URkW47TYueXlOS21CMMr5qPKzmQM/Zy6SFKaXZxV80vfG22hx3zZyxTAC4c2VvdpmncnnK05RPfZrZOk2CgUPIMp/nfDM5TPXpFNey2cQSNnmQxmwyakhDFKfZTPY32fcJtjHzYS8f5ldMX+FJX8y1aWqjufgPFzh7mc2yiysuWbsJ89GaHGVuTSA6udXDlxjfXe5G7YG4qCnMBWVzEiYoY89Pz2iOGTOGvZZYjx49ONPiK+F0ohiPZMTvIgfsfOwuSrrvQrjPBy4l5o2cyVRoy9uueWzzfE+aB0I47e2BJKiKYTI55gkQvg3MJt8qtMXUwpwtxgym5fZa5uLC1K+//tqU118R8JWAHsXgctMn9AQQTGgafDP4q8wHxYD3znzDGoccP/PJgkFr7T+SBUHMz7YpiZjmNrT5BSIH6YkIxoHkrse4wNlFwkhwu9fsspupE8S6uAsYzUqOcc5tt912pHFl8ZcfLVQs3muPO980h9eQO9rMbUxthMYCKllArSmT+q+hiqSg3dQlzV53R3bYYQfuG5h8pljcFUFYm004mylZsjqRdziPsRwnH7+XBx10ENqXOZUpj6cNUc4ArTwRZtWrVw9PvzkTUJAcy88zZ4spzzmD9922lfpEMsUoj/0mja+OBV5sX2w9yRIYg15kL9AI5UJ8MFhm+JIdYvNRD8kcriYfpW4LM4Wzhbk/wFyFkAm7N50EvlJTjIGmj+hOTiGTwyaZtfYahzpShksAyWsOtGcvTkoGCIlGJaTTsQeXLWqbOjmNTXl0IbdoXnnlFffhyU4zdxnSeKOZCaDPON/MLs5A3Nvu2vgCsRcIo4bnuNYue1pxbzK95Gzhw10R3LTcAePa8XwV4A7gBHZ/uMtHJWZk7YC6q02R5lEQ9nKD4rHHHuO7i5kzT8TiI8ezHn9UBk3Y0eR7lZtX9Iuzwp7MpHGrc6/A0xZHAZPlcez3p6dAmpvcaUF5M4I0ZA+xPK1t8d+TtnCKBBcLw82dOlOGM4F7mHzhcD/BHsXNOnMtk2O+ELI5PWy1SohArQQk3GtFpAIhIMDPAz//OOTMT6P5y+8Wv/R
Yjz7gd4vb4shrfhpx5eIpqVOv+OXje5wfJ7xlfHAY86tv3KvUQzQkUgZvVnyd7MLjmPDedHzhhDmeaATUHl4xpCq/THTT7DXSB2cev7648RLWw88bmp6IdrMXBY9TNlm0ScIaPJk8Con4QN5hDLfLceumUPCILay1NRAhYH+2iaZAVprb66aAjfew5T0JwmqJx0D5EXWNYxjy/KyawAAjFOiX+0zArW7OBNqiKkKP3BUydnYz9YlkinliCdx9sfWkSHDbnfgigOAKxUiUYprKFd2Gdk9Ys8l3Czt3H1EzIDWxGQkPj8/0jBfzHMIhrCqiPDl2BOMPJ4c7PFxod911l5lbmjIEohCawhyAs5G+m4VK0uy+GTv3YFEnwt3km/o9ZqcYmvjaEGG4/921ebqcojbTeuq/2GYUOVhw1nJCWnFpD6RF+Lg/XFzsZUbB32Sjbw7nzpL9cFOCTFM/k1hTgL9m8Rm0u82xiXSasIVNgla43WEWjuSrlQFlzmxHk3kCm1yqxBfxEC2XqjmKO1TEsOH4oGsmFguzPTWns2me20n2dZfiezKdyjkNPGeaCXtznx7u7wHODapNfUWk067KiEA6BBJIjXQOUxkRCBQBJCOq3bqTrW38lpBGcHCnmEBwwmrxNBOZQIw1m/hRbMkUCcQcwScUcIshNmkOvZjiwNS73DLIlOQ+bPwhnl93vJ787vJDSOAE9tNxwnBTKGZ3hXhhmWxwOD+lhFXgvLTuRncx0ogM/pqff/cubkCbXWRiGBEjeLKRnnjNeVQLuYxExqnsPsSkE2bGF6tTDnXSET4EW+OYZDhQPAYFst56eU2dCadV8c2lPpFM+fi+cFc0vqqEOcwAiYnnLgEjyBlLVcQBGwmSsLw7E+nA7QKigd2KwRQwqsh968B9YAbp+D7G56ToNVH7TC+Z1LkvELrJWpZIbQJ7UHsIZfyahH+kefam04t4I9M5KlmZ+NpSdDlZJTaf2jg/zSYxQnDgaQouQ1sgRYLC7OX2RYoyzH7tXuYGnGa4DMgx0t/s4pQjkVBfMrXjGkndhK3fJLjHSEP4vLkAmcjxhcYE3o4mopwR564C3wk818FA82iEuY3DITgRuD3ItzEBPFwChBTiDfHUn81mNt+Tabab29MjzUZVTAQgIOGu0yAKBHjSEYcrDmCPzHX3Dc8fHx7aw+vMg0REv/ArEq+e3YeYNIqQWA60oPub+oMPPuAxRKOiaJ3HkrhfTDHP4ezixwn3T7zT3ThpcErZQ9zuHJvpTvCLy21ubh3g6DL5xsFs0sxScJulWLgNiU8ZukPoCBKcB7/clbvThHuyyRN77kyTY3aZfHQ/UowPUgzPN48VouOtOvEcm2yTCjkKe6zTnXsayQonzDehQYTBsBfg/EWgJDTDGO+GRmF3N9M5kRLaYDJrPZ2YQOLW5dloW9ITRpWicp7DI1SXGGIe93QX4zYL8ght575T4e4jchOk9sFr27S7khymufNDcA4TS9ZscVfLtJk7XUhVO/Fg3N0FUhtmz0n3bSLGzn1CumtLnba1MRymJPfQCL1IeNqkriqDvYhsHhPnQsZ9wJdSrTXgt8YBzCgTG5bM3YA739ZDOBlpwgUJIeMJdZvPw+ukjTvDZpoEVx9gEdAs9M60yrM34SYnM1FnOEHMXoJS3N9mZPKlRzwJH25+ouN5XNUId3ZxofFkCB9OVE4VKmFOm7CVZJnmSufrLn7IUn9PUmHqM40CnB7urwVyzHMdmZ1sybqgfBHIjIBCZTLjpqOCRQD/Cu5qIjjdZhFAbH5I+B53u8r4naCYiZYxYtHze+OuhDRKlxBGIkOQI/aDx5Rd5pkn4nAI2zAPt9ljTYvsIsEvtM0nYXYhsrm/jNPa7mLBB5tOmDAzB3dfeKbTlkRG48pFHhENbDNJ2PJ41LhvTkwLq3PgqLZKzl3YpBEWUOKn1E2GhSnQGXZVDWYj7gPdVN35tabxPjLnsUHqeOw8gs9TA0+MeZ5VMNHb5tY2tQGWWYSnDGqSeky/CFaxN/SRO+641dQnkseS+E2CQMh0Q/OU8Ywg8z3zSJ+nWMJNzj186qxl4R5fcLEoCme4ZwKAvreRFQgsZjV24DDSdj9hQ9lkchnidkUEE9puI4BNhZ6+YwP+WndbGJYCHdMz5mN4bW2cG+EWPGxNpLu7kjTTqD3MY+5trw5W+MGkzGpLs1F3MdzVfPkwmu7MFGm+QwjVQ/jyteYuhj/bPKRLj+zHeN8JCyQuBcjWC47nm2PN0i7uSkzaPHXKfN4Ex9sCXPhcMnbTJhhQS49MwqLc9wyx1pZkssGs0gwcU3Qkvt2F/ibEy46pza81waqgBAryBeg+Z4w9njONqtzfk2zWep2yhix3jey1yfMJrN/Fw6k5vKlVawdVQASSEZDHPRkZ5YeJAFHs3HfmliuxBKwUgecbRw5hITiokDv86qCJWcmLHwnUDBoRbcdXMz3EQ893MWEe+LTwDxEx6QmaRFrhrWQ5PA8OAsT55UDTs0gcISjopHPPPZfveiQ+3/K4/wlE4YeTEA5+CNEH2GNiWtCdZJoK+Rnml5u/iBIUPIuUeVrxbGK2iQpFlWIAv9lmPRBbDMFKJjQIUSCyArkGBG4O2Oe3MBVj8HQSZmqPSpjAiY4IRo6be+4oJH66EASsDm7KE8OKzQgdvFAE7EKYCF37OFfCOhNmMtng+QF8b3DGbYxH1kwJknnFsBwlwZpuZuLBw3DAZ+y4/079IGJZOpgzOihInIvcFSGYh7sxZmbFSYLN2EnEFA2hNnBPWqWS+kRKaL87E1yIBixEAqKZcGGa4ARbBq857nZORWxg7JChnH62dVssYQKhiQTn/gbG48XknEGycPsIAtCjs+6jAGKKsSASqgXZZB//xRHLCc/pymOLKCqCFtwHZpmmR3hteULU7U0nWgOxyIVJF2iOS5UucxkCx9wnMY1iGGPH2thYyy63Z50CXNSApeOMEfNPsxwkWsq9wmn6xnNicCajhrkqWX0F9yonMEDs06jpV5VZSZ5woC80ypVl4qeph28Aj+PZoGMXjgPiWLhhyEOZdJ+LDmVMiBq34OyDKx5LiBbjJhh35+gjVxnrxsCcY+mmp6TZZMEZ5sx8cXEZcgURGci3JU+RckkyKPGHcDJzH5IgGc5hNC5fenTKFiOT1SEZU05FppqcuuZLjz5yDjNDpgCuBJb8Yig9Z6+tJEUCPwVnC6cTFx0k+WrCKc7jsNzhrPV7EquoGTi0y3lFJUbK2+ZY8BenDHNdFgTDfn5BuFqZi9KoLaOECBSMADNUfUQgjATcy0Ea+1GWfCOjxXHh4FG+8MILuTXMLpQNP1fcoEdLIQj4veGHxHaZmG+OQlJwERqfk91FwqwVQ7CmO9OkufPLIfwcsokbiZ8BPED8DPB7yWzBHoKHjNXH+C2kCeQCPwboTlMDR7EcOL98GMwvmXmA0tpgfKi4it1NswI3sg8hzlEs8W7ufdtDKEm8DeqchugsYQBQwpvlrgGdys+P+8017r3uNP51WBHSw+8r8wQmGO6jUAzMTIijpV/8hT
A/yeZwM53A1Wc2uZ/O76K7ZtM1m0MfeWwOCHSKeYJZD94uPmiLmQR76RTzKwpDm2HlEEvblEE1MuugABH5zNYo4B5xfoCRSvBBOsS/gIkakp1I7EIwoblNK+YvOpKPzUEbgd34/DCD/Jr9fxbA98nkikponaBefKXAYZNi5pPwJFy5M/Y/JwmaG2lLDZwGOFnRVe4CRjEjOxCmnO1cDhjMWWHLIJqhbeZypulax4sucNrYGkhwoOVgWjSdNSNLL9wfywdTzYO5CG5UuFlHldZNzTykSJ2cBhxrDnHXbMow5YAbfUdOEfDmPiFrPc1MDe6/zOW4MDmL0MfmxoXdG99lz0jZku6EubHjvh7N3njbyOek5TxhlynjJmbTFp0pYy46hpVLkmscxUn8jNmV8C/nG1NTvBL0kQAYgqy4GZKwpM3k24nTgyuaQ7j2EdnIVlzppgCG2d5xnwfFzG1Dpn9cbuhmzgrbHbQ+E3JOM85AIDPlME1zc5Lrlxy+E7hCCdvjHqBtnf66u0xzFLZ7488HvBLMCTlnqI1Ti86awpwYqb8nuT3Ld5oR4uYMdBtPJYwOX+PYz3cIHeFS9ZiBT8TmeK4gm6+ECPhBoIRK7XeEEiIgApEngO5B9KAAAttTnlrjR5efZNzkeTASWY9bsU6LruTBqoyboC/c0kFVIDsyrkQHioAIiIAIBJOA7vsEc1xklQj4QgDHM9FEuOR9qT3TSt3L1+Dbw23GzW5iXTKtT8eJgAiIgAiIQDQJKMY9muOqXomAhwDLL3ATnNUbCAYlXtazt7CbxCOh3VlUjqgeYlcIXiKehDvshbVKrYuACIiACIhA0AhIuAdtRGSPCPhCgIfDeJyUpVeIfrZrsfvSUt0r5TFEZhREkbLcBNHbeNzjnwaue606QgREQAREQASiRkAx7lEbUfVHBERABERABERABEQgkgQU4x7JYVWnREAEREAEREAEREAEokZAwj1qI6r+iIAIiIAIiIAIiIAIRJKAYtwLMKysrcvy2yw9m+wVMwWwSU2KgAiIgAiIgAiIQN4JsC45b/vi7QF6xVU67CXc06GU4zKodt6FkeNKVZ0IiIAIiIAIiIAIhJPAr7/+yuu3w2l7Xq2WcM8rbtOYeTUg5yiLVZPD6+t5TT3vA+dNdQWwRk3mjoCGMncsC1xTMQwlS+Z//vnngOadXOZVrwWG7k/zxTCU/pALXK0aysANSaYGeYZy/vz5eDONNMq0yiI6TsK9AINtImRQ7Va4N27cmLSEewEGI6dN8mWkocwp0YJVVgxDySvoR40aBeI+ffo0aNCgYKx9brgYhtJnhEGpXkMZlJHI2o6EQ6ng4TS56uHUNEGpmAiIgAiIgAiIgAiIgAgUkoCEeyHpq20REAEREAEREAEREAERSJOAhHuaoFRMBERABERABERABERABApJQDHuhaSfsG2eGCP8K+EuZQacAANXVla2ZMkSBjHgphaPeURva4mx4hlu9VQEREAEok1Awj1A48tSptOmTZs7d26AbJIpdSHACLZt25b1gvSQTV2w+VsW1d6pU6cIP3zpLz7VLgIiIAIiECQCEu4BGg2j2lu3bs3KJFJ+ARqYtE3h1VoLFy5s0qSJXLxpM/O3oHnZWUVFxV/+8hddU/6yVu0iIAIiIAL+E5Bw959xei0QXIGvHdW+9tprp3eESgWOADKRJfYaNWok4R6csWnVqhWvPKusrNRyq55BIayrX79+ZJLw7NKmCIiACIhAMAno+zoo44KwwBR87UExSHaIQCQImCAZJsYS7p7xZHq54YYbejK1KQIiIAIiEGQCWlUmKKNDeDSm6G5+UMZDdkSFgK6pqIyk+iECIiACIuDI466TQAREQASKkQB3Ib7++mt63r1793r16hUjAvVZBERABMJGQB73sI2Y7HUReOSRR1q0aOHKSCuJC3bEiBFpFU2v0I477vjEE0+kVzbQpTLjmU6XJkyYsM466/zxxx/pFFaZ/BBAuL9Q8yGRnxbVigiIgAiIQJYEJNyzBFjgw1dUVY/56fcXvpjCX9I5sWbnnXc+++yzs6kq+xqStb7eeuvdcccddm/fvn2///57u5lmgjVG9tlnnzQL11rsxRdfnD59+hFHHPHee+/htlxzzTX5y9zA/WFXrfXYAumoZx6B/fvf/77FFlusscYazZs379Gjx+WXX84jmLaS3CZ++eWX448/nkUVy8vLO3fufNVVV2FAwiZYw/60007jAWuW1jn00EMhY4p169Ztu+22u/322xMepUwREAEREAEREIF0CChUJh1KAS0zcnzF4JcmVMxbYuxr17zRVQd023vTdgE11wez0JF86loxS63X9ZD48jyTgJ+S5Tj++c9/HnvssTznt/3220+ZMmXBggVNmzY955xz5s+f//DDD5sD11prrfgaMs5ZunTpnnvu+dVXXw0ePLh3796smjJx4sRhw4bdddddN954o6daFHb2S5h/++23LJhz//33b7DBBuPHjx80aBC+81tvvdXTFpt0/JVXXnn66aeZTpx++umHHHLIhx9+aIpBiQMvueQSrWESz005IiACIiACIpAOAXnc06EUxDKo9lMe+8yqdkycNm8JOeRnY+7AgQP/+9//3nnnncZhjKuV2tBquKjxobZp0+aYY46ZNWsWmXiRUYSjRo0yzd1yyy2sZYmHNWENpoznLw1ts802DRs2bNeu3cUXX2zW1aEMDns0Hx/EX8uWLa+44grz5C75kyZNQhoa2yjpdk5fffXVPXv2fOihh1ixG1NPPfVUhDVWIdMx7Prrr7etc7gJleEQt1+cNBVSDJGKAjYOZpzZzzzzjDmWLlPmtdde23LLLTH7gw8+mDlz5jvvvHPAAQdQABq0BSL+Mp2gAAk++OAvvfTSDh064B3fdtttjfcdz/Qmm2xy4oknmpp/+ukn5D7GsxeBO2/ePGMYFpoC7r//+Mc/aJp2zzzzTCyhvzvttNN99913ww03mGIGILdNoLfXXnuRiaubOGYMWHfddSHDYvO2QrpMDSxndPDBB//+++82353Ye++9mYQwW1h//fUPPPDA888//7nnnnMXMGnMHjp0KG3tuuuuGMYho0eP/uijj8zePfbYY/bs2Qx6/IHKEYFQEqha4Uwc5Xz9TOwv6Rx+/KsZI0NaedWKkkkfdJg9hr85ph1mJn6dgX4zyeHFUnxVyeMe0DFHqi5envSXgKiYq178nycyhs0Sx7n6xQm9N2hZr5Rk4k95/VggR+J9joNkJ/hk0003veaaayiDN5fV5dFhJ5xwAnpx8eLFF1100eGHH45qRB0iDdHxX3755c8//4y8xs+KbI2vIWFbOKf33XdfVP6///1vHLr4Yln+3OrURx99lNiMcePGffLJJ6hblCUFEIvIaDZJJ6wT+YuqHjlyJIm//e1vWMVqdyhF5ONxxx23++67o5vdByJATz75ZJPz+OOPX3nllVtttRWbqPbHHnsMKdylS5f333//6KOPhgPi2JRkjoGzGQmLIqdyJO/GG2/srtaTZgZChPeTTz7Zvn37559/HhHME4HUT
IvYs99+++2///40ga7FSBzkxAJhyXfffUc9zEA8tbGJc53Cm2++uWeXe1gBeMopp1hvNzcEuDPAVAQmCPcLL7zwnnvu4fCxY8fCmf4edNBBcCMGxlNnwk0EesJ7CJ9++uny5cvhbI7aaKONGLgxY8YQJEMOExtmVsz0dtttt4TVKlMEwkRgwovOyIuc+Svj05q1d/a+2el2YA664F/NGBfSymvMLps/NfYFPeleJ4e0Q87ElzPQbybUr08WBCTcs4Dn56Go9m5Xvl7XFtDu0+Yv6X71GykOnHDNXo0bJB13nNwILMQormJTyd13341GtN5cvMJ4bRH3aOLrrrvuzTffREnjkh8wYAC+WA6JryGhMQhH6qFy5CYKj/hspgQIVvPqInYxT2BX165dkbmkEeuIRcLH8Uxb2zw14ynHPAoQUb3LLrugfV999VUqpJKbb7753Xff9Qh3ZLFRxniFiRFH7DJjIRCFzr711lu9evWifgQ67m2iRKxwZ0qDbjZNcweAuYqx2WOM2Zw8eTKOZ/6i2slhqoA+JocmULEAZEZEfDz1vPzyyxQAPgDpeLI+Ugb4zJpImA+eckaB9GabbcYUxWQyMeBuw58lHMc+tMBDAjTKdMUId2ZZTCTQ8ZRkQDkc8+xRCRM//vgjMTkJ42R49S/2ux8XBg6Zth4g0FO7qYQIhJUAOvKp/o7DN+7Kz/yKWM7h/85Wu/tXM5aGtPKQmu0r8PAyWXnF6P+MCSQVcBnXqAMjRgCHOpLXCFzbNVza6DxUGm5j9GLHjh3R1nZvOolvvvkGZWydxMRqE7/x22+/4aPlcHy0dhfFbrvtNuJeal2xDlWKajetIxkpbyU1mzNmzEhoGKoafzOSmjsJFECYLlq0yEpzcvCCu93bxitvquIWBDcKElZrMpl1YDmsbBkmBvbluOeddx5BO8xeuFFgM21Jk0Di8zFpPPeGj7sMEpyIcxzq3Byw+USq2DQJ5iG41bmzQeQ9IUkE6tBHpmeMArrflgR1auHOfRKE/mGHHZbspoetKmGCCCLaTbhLmSIQGgKEmuBrd6v2mOmI+JJY/vo7O6WZrq1Jza8xi3bNB3JVM/WEtPKQmu0r8IIxudjZaL/MT+/YyaxPDghIuOcAoh9VENCCazxZzeMmzh748MfJ9j5y7NbbdEr6NCQ1JzswYT56mhhuPNbuvUSlm03j4iV2mQ8h1O4y+U+7X42J7vds4o+PNwnJy40CBKsJDaKAif/mCUui0m15AtZt2t1NgsjnzJljd8UnqI35AzEk7lmHnQUxl8B9zq4ffvgBQRx/ODl4x82MgrRx2+NNN4E0prwZC0/sittIHlQgGofIGQL9KcYNBMJjmI0g3BO2mCyTuyLcx+AZ3H/9618Jy3CXgGqJrbJOd555cN864CRhUZqExyoz/wR4SpiIMtrV48J1gz9p9Kr4hNWOrI7l37Tuanm52fCvZuwLaeUhNdtX4H4zmeJw8nfaITcntWrJlICEe6bkfD4O3ZkioGWHLq1YQ4anUT2eGULX2zZvxN4UMe61Go4fHSexLcaag88++yzO7Phfd/zuPCr6wAMPDB8+nFAZ3LrGw+2pwVblThAXTrWE8hvPOtHYOMtZ6tuUIfbaFiaOBalqhG86NdsDa03QOsHlCPr//Oc/1sFPmA0yHTe8jY1JUQ+eeEJB0O7EuycsRgFgItB32CHBlx1B7TwziozGgU1ouImV9/QRqc3HXXm/fv0I7Pn888/d9wHcBTxppg30kbsWZnSeeuopW4AWPajtLk8CXzuq3Txyau9jeMqwl8nS22+/zUKQ7GJ2AUYmRbYYIVVGKdocJQpIgHHkCekCGhDWphdOD6vlslsEsiSgkz9LgLk4XMI9FxTzXge6nJUfWUMGpW61O2k+5Gej2qkBjY6Yw02LYxjJyLLcSHPEIpHQbBJJwnOWDz74ICVRvSxawiooeIsRoEjDCy64IL6GhDqPRyR5CvOMM87g2U0UHo9FnnvuubYkgo/Nk0466bPPPiOimpqp1tRMQAhB4WhrvN0mM+O/PAvLZOONN97AL86HeogvZ/5A2AwTEsRunz59eBCTSUWzZs2YmcQ3hHTGDArg0o7fSw5BMkcddVT//v3pAoVZhQZdS3ARz6QOGTKEBzdZ1ZGAfhz8FGOKgmqHP8ZQjCdxcYrH+8XNkos84gk05gPMGXDbE2zjduq7jWENR54ZBSN3TjCVh27tXtalIUiJgPW//vWvr7/+erI4GVQ7UfUERFGSLpjDjSudXVjCE8YsEAQ9JiEMHOcJxBhcVLt5MpVDOKMobB9dtTYoIQIhI9CkTSqDj3rG6bh9qgIp9uHOfDx2DyTxJ5uaqTGklYfUbF+BF5BJ6pM/8Ymr3BwTkHDPMdC8Vcd67fcevYV7HXd87TlZxx3ZikjF8UwANwuEoyNRezw5ymqAxGej3pDpKOxrr73WPlJJtAbhE4h7yiA342uIx0IgCk+OIvQpj85D8OFFtsVQurSOFkSMnnXWWXbZRAJaUPOEW2CJWSPSHpJBgjVhkMjEfthjeWyUhW7oGsvIEBTOGixEfXDPgfUcbRl3AvOYtxDon0y4U5g6eR6UcHZkKyofIUthws3pO4snotopQ5w6ap6VeQhJwh7CY3i3FIszIs3tSju2XaLqkfVMe6iZZdGZYLBcDOt1IuhtGXcCwizRSM0U5iWv9Au8pgDGMCujFR4LRlIzBPTdfaxJ8/ArEzY+9pYI+YY/UwLmXTZynUcdODfwuDNATOrMI7CmEhbD4fTg/ImvXzkFIcCZw0MONM2NFztnLoglIWsUXc6qJjyNusptYnpQEsvvvGvmQcAc61PNGBjSykNqtq/AC8gk40lpyC7yQJtbkr36CXT/AmkcDwjim8SVi1cSA5E+SFiWXOTRTBRY6ocdPR1iXUji3WcsWNK6aSPi2rP0tXsqL9Qmzl1WXEGYFsqAOrVLqAzxBtwZQJKihBhchlUyyMOQ2HfinZ544gkc/J5dfm/yJC7zz7peWeaqZMVS95MSfpua5/oZFGZxNMqMjls9eW49b835MpTxa3rEFuN1creqDHWtfjM1+/VqqPJPs8NWeUjN9hV4eJmAZaXssV+wHlFUU0R/khLQC5iSognFDpR6r85r/7VnB/5GQ7WHArvbSCJGcJwT2+POVNpDAD7ctci/aveYoU0RyA0B1mvf9YrVqsJTnhNtTc3U0+zPp/9jTeSqZqoKaeUhNdtX4OFlEjun9cmKgEJlssKng2slQNQHLzPyFCM43h1p7dkbuk1WkwydzXk2mDh7PnluVM2JgI8EGsXulzodtna2O9kh8JcQgoxXgfRYiSZj0T3imHkQMLc101BIK68xu/Ln978Y9XrPHfYqW3/HnNEOOZNQnieeE16bdSQg4V5HYCpeRwJEpRPy7jnIxAh5Mu3me++9Z9NKiIAIiEAQCVR8EbOq8y5O9+SPk2ZsN3MA/xbdC2nlpfWqO/aZ8r/5PTr2yaVqN2MUWiZhPU8yvjR0IAv4CoII+Eqgdc3H1yZUuQiIgAjkm0DFl7EW2/XId7tqTwRE
oLgJKMa9uMdfvRcBERABEagrgcqlzozYgjwS7nUlp/IiIAJZEpBwzxKgDhcBERABESgyAjMmOFWVTvmaTvM/3xlXZP1Xd0VABApGQMI9hp5X4bBaOeswbrvttuPGjUs4GqxO2LVr1/LychbeZsFs1pgzxXgvJstvs9gcu1hfnGWwtcJmQoDKFAERCBQB3kLAi7f4JHt1V6CsDZYxFV/F7CFOpsS8+C5Y1skaERCBCBNQjLszfPhw3vXIIieodtQ5b43hhTIEZrtHnfWnL7744oceeoiX4/CWSt7RU1JSwkttKMN7be69995HH32Uxbw/+eQTXsfDGu28kNJ9uNIiIAIiEDQC6HVemBA0q8JhjwLcwzFOslIEIkhAHncH/T1o0CAEN+8KRb7zhnkEumeoR48ezRLURx55JI553v7IK0KtY55duKx4gz27/va3v7HX7vJUok0REAEREIEoEJBwj8Ioqg8iEEoCxS7ceXfgp59+ysvezejxwkvSY8aM8QwmjnaKGUX+888/86JT3vhlyrCL98/jhmfzyy+//OCDD3j5vOfwsG/yKtOzzz47pL24+uqr6+pW/OWXX7ij8sUXNcu95aLbnGasYs4cLxeVqY4EBI444ojbbrstwQ5lJSfAi3754uJDInkp7YkjsKLSmT4+lttO9yvi4ChDBETAZwLFHioza9YsgtTbtGljOZP+9ttv7aZJ4GunZJ8+fYhfr6ys5KVCvAbS7CKEhrf1brTRRtx3pqrrr7/+qKOO8hzO5tKaj8mnPAlexM3HJPhLtVTOL2jdfkSrVjiTxzgLpzlN2jp/6ZX71W2Nxbx9u8a2lVuB/p+BePbZZ+1LkYiDOu200+pEtUOHDlOmTGnZsmWdjgKKebwhnhXBVNyQ2W677UyFHgvzT/ORRx4By+zZs+vaNFMaHuQwR6255prdu3dnnf4ddtihrvXkvDzXI9PL4447jkA1T+UwZ0S41sDu2ZVi031tpigW6l3MJ4cNG0YXeNNCgwYNQt2XFMbnfihnflu/ckl1gyaVTdfhezxF09qVWwK5H8rc2qfa0ibgGUqzmfbRxV6w2IV7muPPK4FuuOGGe+65hzj4H3/88ayzzuIhVJ5J5fCnnnrq8ccfJwieGHd8tHim27dvP2DAAE/NN9544+DBg92Zb7zxBmE5Ngd3bNu2bRcuXMivqc1Mnaj/42vl7w0uXVhhilU1abd456uWb5B7fz+TCqwy843UJgVk7+LFi93W1q9f372ZjpEMzaJFi9IpGV9mwYIFJpOJHJ57Pnfdddcll1zitsFjYXwldc1hgNLXXjxajZZ125Nmc5yflBwxYgQz1d9//x0n9wEHHMCjHZ5nQtKsLYfF/vKXvzA1evDBBwl781QLGWi///77nMaeXbVuvvnmm7WWCW8Bzk9j/Ouvv16nWU0Yu5zDoVxn9odbOs7v9Tt8+NrIMKIIu805HMqwowi7/XYoM/61DTuBDO3n97uYP/jB+cV6/vnnLYT+/fsfeOCBdtMk8LXjlLKZ//nPf1hDhp89ctZZZ527777b7kLQs/iM3bQJpNK8lZ9ff/2V0cKFj6Tg88cff6CEcH/+73//I021aX3Gj6i6qnnVVc2qV/6r2Wy+YvyItA5PWQhJd/TRR6+xxhrMJf7+97/vtNNOPG7LEVxdeGqZmaBrt9lmG2KETDVDhw7F0/nCCy9suOGGkDnkkEMQrzwq0LFjxxYtWpx++ul005Sk19RMJsV4DpibGyY/2V/mRTx7gCSlKiyxxdhkItS3b18swR6UsdlFvr0SSJN55ZVX9ujRw+w1g3vdddchNDGYKBpOgPPOOw/nMV52ZJ8p9tNPP1EJwVFscoit0CRMrxOiQBqyQhE1c0ZtvPHGnFpUNXbsWEKw5s6dayrnL/VwT8Bu2sT999+PIG7YsCGnECeVzb/gggu6dOkCMRYvuuyyyziXzC7TNY5CszI9IJOa2eShCwoTnIMZthKbwH53j6iEXWmOi5sMR5loItsKoWKMKacNeLnvNH36dNPoK6+8wiMiYFlrrbWIMSM2w+Sjp0899VTOMbqM8uZulcmfOHEi8wHqadq0Kc+NTJ061eSb/nK7gJHl5buHH364myqjyXVqSrr/ck1xZXFK11xt6f4xVyV/0z0ghOWYhgGNj/EXhLAHaZmc86GsfOUCvnUrX74greZVKHcEcj6UuTNNNdWNgGco+QHiVwmJZCWTEikIFLvHHUW45ZZbImVMZAV31UkjNN3KhjQqDe1lM413CqwJdyWMr0Ca8LE1kMANzMfmlJWVob1o5c+GqHx5co8vETIjLyI0wx5OoiS2WVLy+sVO511SxczUb0wp94Hx6YsuuggPJUIcBUYQwmeffUaYOIYh3ydMmPDkk0+ilZFriLCvv/4aTckuEKE12YVkR7gfeuihqHMeBuCRANIoKkQ2DRHM8MMPP7z44osIL1rZf//9qdDNwW0M0pnYZYQFx3JHApFH+ApL+pgyt956K7YRqoG/kBsdiN099tjj448/xuaHH3547733ZpgwDKqUN1RJv/vuuyzoSe8+/PDD448/nucZdtxxR7Q1iwudcsop6E5mYqYwf/n885//ZOEg0+JNN91EaAETCfITojBhJKBgjsE0YO2110aVmvkMstVUYv7W1L3qjCKT+zb0FIabb775559/jue4SZMm5tYNrFCrMIc2+WxeeOGFHEJ3uP/DQDz33HOms2QydbzllluAw2TmmGOOmTRpEnLZNGr+MhasnoQIZvUkcmgFY9IcF0pyiDEe2f3YY4+xyTqq5KCheT7khBNOoHJ2MbiM3TvvvEMBNpnvbbbZZghE2uV8QPFzCJ196aWXmJuh2pnN8iGTy+fggw/Gqv/+979MhAhz4llwbnlRD/1l5sDJ8/LLL8+ZMwfhTk+R++ziw90wbotxy9VzoVEnB9ZcbasuN3NIrX8zO6rWagNSwHyDYUy0u2lo57KPNQHu9TpsXs/1BR6QMS0GM3I5lMXAK8B9tENJIsBmBs+0FKK+SHahNfmlRxihIE888UTk5rRp0+g7oof4dQPhqquuwvmHaEOGEuKCPkM0mF1IK5y1KAnchOgnlCWiKjU6ppWcCHZyyUQVjzseQQxA4vx57NKF1pWe4wQ1p/ygvJnPIKdMKSIicN8SHYQERB0S/G2P3m233YgAYROhTI8QkWbXSSedhCOceswmapgc0rhaKYZiNvlMsqnZNmQy3X95tAAtbnNwPCOazSY+V6S53YWy55lgs0kTaFm7i7HD4242GSzjhjebaH3is00ajYiLlyFmk6GkEtSz2WX+4iNHofLwMZvJUODoxePOsQhTeyzodt11V7tJwmOh2cVJRcCVLYb+7tWrl920CaYETDXNJl3j+27GjBl2LzVffvnlZtOEtbz22mt2r00wXkwk7Gb642LIMGqwQg3THMZwAlMVBrOkkq0TFc5e5gY2xyRmzpxJPjMQNs844wzImBh0W4zri9Ns8uTJJgdnOeV5LpxN+st5xZVidnE+INbtgfj7KUkUvs0xCa6p1a4sz+4km+aqNF1LUiT
02dxuivnba+47hb4zyTuQ46HkRuv1HWLfydPGJ29Te3whkOOh9MVGVZoWAc9QekRRWlUUcaFi97jzS4/mQ0zgCESv41ceOXKkeVYV6YCvjgJ8EEPIFP4iW1u1asV9fOvnw69JsDvOYPQTPlEUKlWZo0L6F6cmFxWSyNiPvxaBSxqxhTAlGMb2ix9+nMpmE0VlH1sE4HrrrYfT1OxiEzikv/nmG24s2Jo5lprJtBV6Euwi6sNmEm6BNxcbEHZkomvtLtLsspspEjyKYIcVwzbddFNTmDqxx9gZfzginokcHmJsYG9qFEx78C7bShCOKH67mTDBfUOwcwfAhmgzkbBOeu4G4PinAFqcfDzuthLmIZyQdpOEbRptTUnTI3rNZIO9TFSQ8u7ypFOMC9OhUaNGUYaGjIYmjT2E9IwfP545KjNe4yxBN3M3ww66aQKbOWG4x8JFwW0NpmrmfhQXF+S5ecLEjHOAORj3XtD9xhhuifAxNTBVYy6NhVtvvTU5nFdMoc2udu3auceL6QT53O4we/VXBHwhMGeis2yBU9bIaRn7VtRHBERABPJMQMI9BpzYmPjwGHN33owHchNvH5/44UFGIBnTVI3xhyfNIaDl0qlJ904a7Tz+t6R7j3rG6bh90r3UnNEH1Yi6JXzF6GZThxVq7ltdJjLBNsJmwvAhWyCfiQzsZEbHYw8EgSCsjampUaAgjTfaFOYmDEI/dR+Nd/yBBx6wsxrKG84E8xAvTkA/Ny6Q8twgcq97iDr31OzpoCFPzJJ5bN+oW88hKTYJ+GHiQQF3tahq4qP4MIsgrAUFzz0rusCE1oYVmTrR1iTIR/fTO2a22INkZ2ZI/hZbbIELn4nEW2+9xS0sIm2eeeYZc2Cyv24zPOeVWSTHM41JVo/yRSBDAmYF9zabOPX065khQh0mAiKQDQF99WRDz89jiUNo4NVkq9rrvKvTrL0zn/VkYnEXrk9JLJ+9pTGfdGYfHOfII/yjRB5TA8HEhFLwfCqx13i78XFmvPwfz2si9aiZxe+pmSAcQilwqSazk/LE1di9pHHf2mnDRx99ZHeRprDZxHjstLuyTPAkKF5/HMzmRbmmtmQoEs5PKMxykNzWc6t5j1X4/hG1BGLFryVKcD+ql2dSzSHGce45vNZNanCX4Z6AG1GKcSEMzH2gJ82To7jSWW3pnHPOQYUTTYRHnFmuu5gZZVS7OW2INXLv5Z4At7z4UBV+d8Q3xtSEu/9qnO5EuRA9n+IksbUxf+D5BKZJNkeJ1AS4lMxLJ+w1lbq8C3X1xgAAQABJREFU9sYI6NVLOg9EQAQKSmC1n9iCWqLG60IAXb73zc5TLHhCnLHV7rGYY2fvm7JR7VSAEx3XMgHEhI7woCeS0cSWIJqRlayygscXMUp8EQ/yEpjBW2PTNB0fLSKYaBBWPuFOBY8QoAvdwTCeeljvhQAJgqcRdjieiVRBI9oy6HieTeSpYpaUevrpp1m6xOxCO2IYMS24gVkuxpbPLEHsEzqSCk1wNpUQO5QMRcJ3b+2yyy54o4kzsWE5VIKn2SzJYqyCDD51HnjFp45+JQaJNRaZMvFMJ7sILMHRDgr6SPh+Zh1xHwUiTKJTRP8T41TXcbFVMRXBZoKkocRTpKhzHiQlfgZEPPCAzTjsGQJOpH/961943+kIg24PZy5EJucSJxgjyIO8RMXgd2d5eM407mIxzSMIjUnjVlttZY9KliCkxwTbJCugfA8B9DprQ3kytVkLAQn3WgBptwiIgL8EVlvXwt+mVHtuCXQ70Dn8306zWCjCnx987eSQn/WHJyDxjxLhgIpiERIeQDRV8lAjwh09TVwyipklXIxXPv0GqYHaCGg2T14SwuEOfvDUgxOXR1fRf0hePLssIGOXlKEkZqBukX0s74gEJJLEHM68AimPv5Zdngoz2GRtk4qKCjy+SEzzwQVOPemjQLYST8KiMe7WUeSYZz/E0BOKg8ylWmQrUpXY8U6dOnEIUTr4swnl4gEMmjZvD3BXlUGaOx68RIzpEIElTH6ooU7j4m6R530JwmFOxR0DplI48lHPdIF1flDhKHI+jCARVgwiHeHUsoczeaN1RDlzEh4q5WSgMJMB1uFB7rPaD6ff+uuvT0i9PSRZghsjPOFtnxBIVkz5IpAVAZ4sN8K97aqHWLKqUAeLgAiIQB0JlHAHv46HqHi2BFgWA8cqj1GbpwzRPUgWltf47bff0Gq1Psi4WvOsC0m8+8LpTpM2sbj2LCJkVqs28Bv4jJGGfAJlKaEyDC7DigB1G/bVV1/xFCZPatpHAtx7lc6eAMFI3ItgRZr4qtD03N+o65VlrkoWPE0xsYxvK1w5nK7cA8Fmpt+eMzZcHUltbS6Hct5vzj82cUrLnEumOPVreeI8tVXamwGBXA5lBs3rkNwR8AylRxTlrp1o1qRQmZCPK0q90w4h70P0zSegiKc2kY+4oqPf20L0EHnN+k6FaDnEbRKJ9Oijj9IBFnXlsYcQ9yRvpht3e6uNpdrzhlwNiYAIeAis5hf07NOmCOSHAKHhuKI9H16mk5/W89MKQT5S7f6hJtDILFrqXxOqWQT0ZKrOAREQgYITkMe94EMgAxy75qCbhed9n+5dpAmJ9uRoUwREQAT8JaAnU/3lq9pFQARqJyDhXjsjlfCbQOo1B/1uXfWLgAiIQFoE/hTuejI1LVoqJAIi4AcBhcr4QVV1ioAIiIAIRIvAwhnOAl6dUeK0+fN1y9HqnnojAiIQDgIS7kEZJ/N2noRv8AmKibJDBEJIQAtnhXDQAmlyxVcxs1p2cRo2CaR9MkoERKAoCChUJijDzLIYrMg2depUltZmhYcUb9kMisWyI44A865ly5ax/mCEF9eL63SgM1DtvDmLqynCqzoGegCiZFzFF7HetOsRpT6pLyIgAqEjIOEelCFD6rHUNO/6QbsHxSbZUUcCyMTFixeXl5dr3lVHcj4WZyzWWWcd3hLqYxvhrBomvOIK2wUnrQHUq5fSwqRCIiAC/hKQcPeXb51qx9HOm1BYXJnXT9bpQBUOCAFeKvH+++/zyk/5dwMyIpjBWEiYJhwOsPTu3TvhLmUmIKAlZRJAUZYIiEC+CUi455t46vbMPX3JvtSUArsXJcS8i3ffagQDO0YyTAQyIbB4jjN3UuzAdlpSJhN+OkYERCBXBCTcc0VS9YiACIhAmAjwSAaxeVjcrl07PZVRy8hN+zpWoEVHp3zNWkpqtwiIgAj4SUCryvhJV3WLgAiIQFAJcHeId5/xIRFUGwNjl+JkAjMUMkQEipyAhHuRnwDqvgiIgAiIQG0E9Oql2ghpvwiIQH4ISLjnh7NaEQEREAERCC0Bs4h7u56h7YAMFwERiAgBCfeIDKS6IQIiIAIi4AuBZX84s76P1axF3H3hq0pFQATqQEDCvQ6wVFQEREAERKDoCEwb7zjVTtN2TpPWRdd3dVgERCBgBCTcAzYgMkcEREAERCBQBPRkaqCGQ8aIQHETkHAv7vFX70VABERABFITMMK9rVZwT41Je0VABPJBQOu454Oy2hABERCBoBHgfW
E77bQTVunNsrUMzbQvYwUU4F4LJu0WARHIBwEJ93xQVhsiIAIiEDQC6PWdd945aFYFzp7Kpc6Mb2JWSbgHbmxkkAgUIwGFyhTjqKvPIiACIiACaRGYMcGpqnTK13Kar5NWeRUSAREQAT8JyOPuJ13VLQIiIAJBJVBdXT1z5kysa9WqVUlJSVDNLLRd9tVLQlTooVD7IiACEJDHXaeBCIiACBQjgeXLl99b8yFRjP1Ps89aUiZNUComAiKQFwIS7nnBrEZEQAREQATCSODPd6b2CKPtslkERCB6BCTcozem6pEIiIAIiEAuCKyodKbz9iWeTO2Zi+pUhwiIgAhkS0DCPVuCOl4EREAERCCaBGZ971QucRo0ddbsFM0OqlciIAJhIyDhHrYRk70iIAIiIAL5IfDnq5e6O6X6rcwPcbUiAiJQCwF9GdUCSLtFQAREQASKlICeTC3SgVe3RSC4BCTcgzs2skwEREAERKCQBKZ9FWtdr14q5BiobREQgdUIaB331XBoQwREQASKhABvTu3VqxedJVEkXa5bN6uqHC0pUzdkKi0CIuA7AQl33xGrAREQAREIIAH0+p577hlAw4Ji0pyJzrIFTlkjp+WGQTFJdoiACBQ9AYXKFP0pIAAiIAIiIALxBCq+iOW12cSpJw9XPB3liIAIFIaAvo8Kw12tioAIiEBhCVRXV8+bNw8bmjdvXlJSUlhjgti64mSCOCqySQSKnYA87sV+Bqj/IiACxUlg+fLld9Z8SBQngVp6rSVlagGk3SIgAgUgIOFeAOhqUgREQAREINAEqqsdCfdAj5CME4EiJSDhXqQDr26LgAiIgAgkJTDvN2fxbKe0zGndLWkZ7RABERCBvBOQcM87cjUoAiIgAiIQcALG3d5qY6esYcAtlXkiIAJFRUDCvaiGW50VAREQARFIg4BevZQGJBURARHIPwEJ9/wzV4siIAIiIALBJqAA92CPj6wTgaIlIOFetEOvjouACIiACCQhIOGeBIyyRUAECktA67gXlr9aFwEREIHCECgtLd1qq61om0RhLAhsqwumOwsqHKck9vYlfURABEQgSAQk3IM0GrJFBERABPJFoKysbL/99stXa6FqxwS4t+ziNGwSKrtlrAiIQPQJyNES/TFWD0VABERABOpAQHEydYCloiIgAnklII97XnGrMREQAREICIHq6upFixZhTOPGjUtKSgJiVSDMkHAPxDDICBEQgQQE5HFPAEVZIiACIhB5AsuXL7+15kMi8p2tWweNcG+7Wd2OUmkREAER8J+AhLv/jNWCCIiACIhAWAgsnuPMnRQztp2Ee1jGTHaKQBERkHAvosFWV0VABERABGohUPFVrECLjk75mrWU1G4REAERyDsBCfe8I1eDIiACIiACgSWgd6YGdmhkmAiIAAv4CoIIiIAIiIAIiMCfBPRkqk4FERCBABOQcA/w4Mg0ERABERCBPBOQcM8zcDUnAiJQFwIS7nWhpbIiIAIiIAIRJrB0oTPrh1j/2vWIcC/VNREQgfAS0Dru4R07WS4CIiACmRMoLS3t0SMmT0lkXkvEjpw+3nGqnabtnCatI9YzdUcERCAaBCTcozGO6oUIiIAI1I1AWVnZQQcdVLdjIl/aLCkjd3vkB1odFIHQEpCjJbRDJ8NFQAREQARyS0AB7rnlqdpEQARyTUAe91wTVX0iIAIiEAYC1dXV5p2p9evXLykpCYPJ/tuod6b6z1gtiIAIZENAHvds6OlYERABEQgrAVT7jTUfI9/D2o0c2l251Jn5Taw+hcrkkKqqEgERyCkBCfec4lRlIiACIiACISUwY4JTVemUr+U0XyekPZDZIiACkScg4R75IVYHRUAEREAE0iBgA9wVOJQGLRURAREoCAEJ94JgV6MiIAIiIAIBI/CncN8sYGbJHBEQARFYRUDCfRULpURABERABIqXgPW4Fy8C9VwERCDoBCTcgz5Csk8EREAERMB3Aisqnen/i7XSrqfvbakBERABEciUgIR7jNyQIUPWW2+9Ro0abbvttuPGjUsI84477ujatWt5efm66657zjnnLFmyxBTjQFZSc39OO+20hDUoUwREQAREIKAEZn3vVC5xGjR11uwUUAtllgiIgAg4jtZxd4YPH37uuefed999qHbU+V577fXdd9+1br3a+66feOKJiy+++KGHHtp+++2///77gQMHotRvv/12TqGPP/54xYoV5lwaP378Hnvscdhhh+nUEgEREIGAEygtLe3WrRtGkgi4qfkwzwa4i0Y+cKsNERCBDAlIuDvo70GDBh177LEgRL6/8sorCHRkupvo6NGje/fufeSRR5KJi71fv35jx441BVq1amVL3nTTTZ07d95pp51sjhIiIAIiEEwCZWVl8jKsGhoj3NvqydRVSJQSAREIIIFiF+7Lli379NNPL7nkEjM2eJ523333MWPGeIYKR/tjjz1GFM0222zz888/v/rqq8ccc4ynDFVRBuc9znjPLjaX1nxM/vz580nw0hPz3hP33/gDlRMiAhrKEA1WalM1lKn5hGhvmkNZb+rn3HeobL0pr5MNUe+KytQ0h7KomIS0s56hNJsh7Uv+zS524T5r1iwCXdq0aWPRk/7222/tpknga6dknz59eEl4ZWXlySeffOmll3rKjBgxYu7cuUTRePLNJi8oHDx4sHvXG2+80bhxY5vz5ptv2rQSoSagoQz18LmN11C6aYQ6XctQVlftN+ULhPv7P8xb8Nuroe5p5I2vZSgj3/8IddAO5aJFiyLULd+7UuzCPU3A77333g033HDPPfcQB//jjz+eddZZ11577RVXXOE+fOjQofvss0/79u3dmTaNUx9nvNnE484TrnvuuWezZs3IYa7J6UtwfP369W15JcJIQEMZxlFLaHMxDCU3CW+99Va6f/755zdo0CAhhwhkpjWUs38q+2JJdVmjHQ4+zinVz2JAhz2toQyo7TJrNQKeoTRhCKuV0EZyAsX+DdWyZct69epNnz7dIiLdtm1bu2kSaHRiY0444QQ2u3fv/scff5x44omXXXaZfahr0qRJb7311nPPPec50G42rPnYTRLIdLdS92y6SyodLgIaynCNVwproz2U3D80fY92N9Pq48zYQpAlbTat37A8xfmgXUEgUAynaxA458EGO5Qk8tBcZJoo9sUE8DNtueWWb7/9thnRqqoq0r169fIMMPdxrEZnF1qfv/Znj/TDDz/MQjT77bef50BtioAIiIAIBJ2AXVIm6IbKPhEQgWInUOwed8afCJYBAwZstdVWPHjKcpB4080KM/379+/QoQOx6ZQ54IADWHxm8803N6EyOODJMfKdvch9hDuVsEpDsZ9Q6r8IiIAIhI6A3pkauiGTwSJQrAQkNJ2+ffvOnDnzyiuvnDZtWs+ePUeOHGmeVZ08ebL1sl9++eWsFcPfKVOmsP4jqv3666+35wxBMhQ+7rjjbI4SIiACIiAC4SBAyJCEeziGSlaKgAjoBUw158DpNR/P6cADqTYHV/pVNR+b407wmKk7bMa9S2kREAEREIFAE5j3m
7N4TuyZ1Nax11HpIwIiIAJBJlDsMe5BHhvZJgIiIAIi4DsB425vtbFT1tD3ttSACIiACGRHQKEy2fHT0SIgAiIQTgKEAnbp0gXbbUxgOPuRtdWKk8kaoSoQARHIGwEJ97yhVkMiIAIiECACRADyarkAGVQoUyTcC0Ve7YqACNSdgEJl6s5MR4iACIiACESGwLSvYl1p1yMyHVJHREAEIkxAwj3Cg6uuiYAIiIAIpCSwYLqzoIKXLzltN01ZTjtFQAREIBAEFCoTiGGQESIgAiKQZwLLli279dZbafT888/nVXR5bj0ozRl3e8suToM1gmKS7BABERCB5AQk3JOz0R4REAERiDSB5cuXR7p/aXSu4otYIcXJpIFKRURABIJAQKEyQRgF2SACIiACIlAIAnoytRDU1aYIiEDGBCTcM0anA0VABERABEJOoEJPpoZ8BGW+CBQZAQn3IhtwdVcEREAERMAQ4IWpcyfFkm27C4kIiIAIhIKAhHsohklGioAIiIAI5JqAcbe36OiUr5nrqlWfCIiACPhCQMLdF6yqVAREQAREIOgEFOAe9BGSfSIgAl4CWlXGS0TbIiACIlAMBEpKSjp27EhPSRRDfxP0UcI9ARRliYAIBJqAhHugh0fGiYAIiIBPBOrXrz9w4ECfKg9HtX++M7VnOKyVlSIgAiLgOAqV0VkgAiIgAiJQfASWLnRm/RDrdrvNiq/z6rEIiEBYCUi4h3XkZLcIiIAIiEDmBKaPd5xqp2k7p0nrzCvRkSIgAiKQXwIKlckvb7UmAiIgAsEgsGzZsjvvvBNbzjrrrAYNGgTDqDxaoQD3PMJWUyIgArkiIOGeK5KqRwREQARCRmDRokUhsziH5kq45xCmqhIBEcgXAYXK5Iu02hEBERABEQgOAb0zNThjIUtEQATSJiDhnjYqFRQBERABEYgGgeVLnJnfxLrSVk+mRmNE1QsRKBYCEu7FMtLqpwiIgAiIwJ8EZkxwqiqd8rWc5uuIiQiIgAiEiICEe4gGS6aKgAiIgAjkgoANcC/al0/lgqLqEAERyD8BCff8M1eLIiACIiACBSXw56uXehTUCDUuAiIgAnUmoFVl6oxMB4iACIhABAiUlJS0b9+ejpCIQHfq1oU/Pe4KcK8bNpUWAREoOAEJ94IPgQwQAREQgQIQqF+//qBBgwrQcMGbXLHcmcbbl3hnas+C2yIDREAERKBOBBQqUydcKiwCIiACIhByArO+d1YsdRo0ddbsFPKeyHwREIGiIyDhXnRDrg6LgAiIQFETsHEypfoFLOoTQZ0XgTASUKhMGEdNNouACIhAtgSWL18+ZMgQajnttNMIm8m2uhAdr1cvhWiwZKoIiMDqBCTcV+ehLREQAREoDgLV1dXz5s2jrySKo8cre2k87nr10koe+l8ERCBEBHSjMESDJVNFQAREQASyI1BV5WgtyOwQ6mgREIECEpBwLyB8NS0CIiACIpBfArN/dpYtdMoaOS03zG/Dak0EREAEckBAwj0HEFWFCIiACIhAOAhUfBGzs82mTj1FioZjxGSlCIiAm4CEu5uG0iIgAiIgApEmoDiZSA+vOicCkScg4R75IVYHRUAEREAEVhKwa0GuzND/IiACIhAiArpXGKLBkqkiIAIikDMCJSUlrVq1ojoSOas04BWxfs6fwr1HwC2VeSIgAiKQkICEe0IsyhQBERCBiBNg7fZTTz014p30dG/er87iOU5pmdO6m2ePNkVABEQgFAQUKhOKYZKRIiACIiACWRMw7vbWGztlDbOuSxWIgAiIQAEISLgXALqaFAEREAERKAAB887UtoqTKQB7NSkCIpATAgqVyQlGVSICIiACISOwfPnyBx54AKMHDRpE2EzIrM/MXAW4Z8ZNR4mACASGgIR7YIZChoiACIhAHglUV1fPnDmTBknksdmCNiXhXlD8alwERCB7AgqVyZ6hahABERABEQg8gQXTnIXTWETHabtp4G2VgSIgAiKQmICEe2IuyhUBERABEYgUARPg3nJDp8EakeqXOiMCIlBMBCTci2m01VcREAERKFoC076Mdb3dZkULQB0XARGIAAEJ9wgMorogAiIgAiJQGwEFuNdGSPtFQASCT0DCPfhjJAtFQAREQASyJiDhnjVCVSACIlBwAlpVpuBDIANEQAREoAAESkpKmjdvTsMkCtB8npvkhalzJ8fabKtQmTyjV3MiIAK5JCDhnkuaqksEREAEwkKAtdvPPvvssFibpZ0l07+O1dCio1PeIsuqdLgIiIAIFJCAQmUKCF9Ni4AIiIAI5INAybSvYs200ztT80FbbYiACPhHQMLdP7aqWQREQAREIBAEJNwDMQwyQgREIGsCCpXJGqEqEAEREIEQEli+fPkjjzyC4QMHDiRsJoQ9qIPJK4V7zzoco6IiIAIiEDwCEu7BGxNZJAIiIAL+E6iurp46dSrtkPC/tUK2UG/FEuf3n2IWaBH3Qo6D2hYBEcgBAYXK5ACiqhABERABEQgsgeaLJ5c41U7Tdk6T1oE1UoaJgAiIQDoEJNzToaQyIiACIiACYSXQfPEvMdP1ZGpYB1B2i4AIrCIg4b6KhVIiIAIiIALRI9Bi0aRYpyTcoze06pEIFB8BCffiG3P1WAREQASKiUDzRb/EuivhXkyDrr6KQFQJSLhHdWTVLxEQAREQAcepXNJ0yZQYCAl3nQ4iIALhJ6BVZcI/htHqwYqq6nETZ89YsKR100bbdFqrXmkuX8bud+VjJ87+dFbJ2hNn99qgdQ4t99vsMAL3m4lPQ8nF6p/lGdTcuHHjdL8/qlY4k0Y7C6c7Tdo4Hbd3Suule2Ct5fyrmaarVpR++WSpU1XdoGlJk7a12qICIiACIhBwAhLuAR+g4jJv5PiKwS9NqJi3xHS7XfNGVx3Qbe9N2+WEQr4qr/fvHz7JoeX5MjvGOIdmU5t/lvtX8+pm53goV6+crVwCz4BJgwYNLrjggpgdtX4mvOiMvMiZH1s7MvZp1t7Z+2an24FmK6u//tWMWTWV16sxu2TZAufO7jkzO6s+62AREAERyJyAQmUyZ6cjc0sA5XHKY59Z1U7l0+YtIYf87BsKaeUhNZvx8s9y/2r21WxfK/eVSUz+PtV/lWqnJ/MrYjnkZ/nxr2YM87XyLDuuw0VABEQgUwLyuGdKTsfllAB3+fG1e14DYzbPe/rLTybNKS3JPGamqrp62NjJoas8pGZzXvhnuX81+2q2r5UnY8IFwzW1R7e2WUVtEceCr51F0Ff71GyOOMX59SOnJFPvT3WV8+mjvtSMqUkrL3FGXuxstF8uQ31WI6MNERABEfCXQEnk35nnL7+Map8/f37z5s3nzZvXrFkzKuDF46+++uq+++4b+beOp6A15qff+z3wUYoC2iUCIpABgWGDtuvVee2EB/LN8/jjj7PrqKOOSvrlM3GU8+j+CQ8PceaAl51OO4TY/mI1Xb+VkRl5z1B6RFFkuulTR+Rx9wmsqq0bAZ5GTXHArhu13qB1kxQFUu/6ccbCd76dkaxMYCsPqdlw9s9y/2r21WxfK0/NJMWV
hddm0qRJ2JbKfcPTqCk+XfZyWm2YYn+qXTO/d354PWmBbGqm0tSVp+5UUpu0QwREQAQKT0DCvfBjIAsgwBoyKTgM2mH9ZF7DFEfZXbjzUwj3wFYeUrPB7p/l/tXsq9m+Vp6aSeory14jSROsIZPis/0Zmbuu8eWnEO7Z1IzBqStP3akU/dUuERABESg0gUzDEwttt9qPGAFWfmRJk/gwdnLIZ282/Q1p5SE1m5Hyz3L/avbVbF8r95VJbOVH1pBxEl2azTrE9mb88a9mTPK18oy7rANFQAREIGsCEu4xhEOGDFlvvfUaNWq07bbbjhs3LiHVO+64o2vXruXl5euuu+4555yzZMmq0I4pU6YcffTRa6+9Nnu7d+/+ySefJKxBmSkI8PwcKz9SwC0QTJr8rJ6uc5yQVh5SsxlE/yz3r2Zfzfa1cl+ZxB7iZOXH2Cfu0tz7pqwe8fSvZoz1tfIaHPojAiIgAgUhIOHuDB8+/Nxzz73qqqs+++yzHj167LXXXjNmeOOhn3jiiYsvvpgy33zzzdChQznk0ksvNQM2Z86c3r1782jXa6+9NmHChNtuu23NNdcsyFiGvVHWa7/36C1aNmloO9K2eSNycrKOu6mcCsNVeUjNBrJ/lvtXs69m+1q5r0xi67Uf/m+nmet1Cvjgycl+HXf/aga3r5VTvz4iIAIiUAgCWlXGwcu+9dZb33333fCvqqrCoX7GGWcg093DcfrppyPZ3377bZN53nnnjR079oMPPmCTkh9++OGoUaPc5VOnPQ9Qex6vTn1s5Pd+8P3Mox8a16ZZwzv6bk4MAN7EHHY5g1dLpt86lY/5ccYbo8buucO2enOq4eYfcP9qxnL/htJU7tOrauvKZNmyZTfeeCMmXXLJJbyMyQxZ0r/+vd/Uv5rpTNWKyp/f/2LU6z132Kts/R2zukWQFI125ImAfivzBNr/ZjxD6RFF/rcf7hai83Aq7vDjjjuuY8eOdRoQfro+/fRTfrfMUaWlpbvvvvuYMWM8lWy//faPPfYYUTTbbLPNzz//zOqNxxxzjCnz4osv4qQ/7LDD/vvf/3bo0OHUU08dNGiQ53Btpk+gYn4sBmnDNk2zeRo1WXNMA/yo1jRH5dt2Wuv3b6r5m9v5ht9m+8rEp8r9ZuLTUHKq+Gd5BjUnXQUy/hIi+MSnJRT9q5lelNar7thnyv/m9+jYR6o9flSVIwIiEDoC0RHuL7zwwvXXX7/TTjsdf/zxhx56aMOGqyIuUozKrFmzVqxY0abNqpUTSH/77beeQ4488khK9unTh3XTKisrTz75ZBsqg46/9957CbYh5+OPPz7zzDPxXQ0YMMBTw9Kaj8lkckmCGScfk7B/TYFi/vvr7D/ofrtmDQ2ccKFwD2i4LJe1HgLFMJQlJSUXXHCB6XgYLzfPkCXbLIahTNb3iOVrKCMzoJ6hjPD3jx9DFqlQmc8///zhhx8eNmwY2vqII47AAU8MTGpqU6dOxU0+evToXr16mZIXXnghvnMiYdwHvvfee1R43XXXEVfz448/nnXWWbjVr7jiCsog07faaitqMOUR7sj3eJ/91VdfPXjwYHedxM03btzYnaM0BIb9VPrRjNJ9112x1zqelzUKjwiIgAiIgAiIQNQILFq0CPeofStl1LqX6/5Ex+MOmc1rPjwe+tJLL6HgeWZ0o402wgE/cOBA3lSaEF3Lli3r1as3ffqql4yQbtu2racwGp3YmBNOOIF81o35448/TjzxxMsuu4zQmnbt2nXrFlsOxXw23njjZ599duXWqv+JxsErb7bxuBNJv+eee9o3p7755pt77LFHHW5br6o4aqmnHvnUmfH7Tltvtu/mHULXN9wGGsrQjVpCgzWUCbGEMVNDGcZRS2izhjIhljBmeobShCGEsSMFsTlSwt0QJJqFc4LgdRIs8MJTp8juBx54oG/fvvGI8ZdvueWWPHV60EEHsZeHU0nzKKqnJNNBNLrNROuTNq8bZHrw3Xff2V3ff/99wjh7Qnc80TvIdLdS92zaCostYWLc1127iRtOuCBoKMM1XimsjfZQcmfyqaeeovuHH354WVkEfwvcIxvtoXT3NPJpDWVkhtgOJYnIdCoPHYnUlzWPmZpQGSRy//79WZ19gw02AOJdd91FBEtC4c5eHOGEpBPuwoOnLNaON/3YY48lnxqIojGrLhxwwAG33347Dn0TKsNMgBwj31nTnUdXb7jhBn78eHr1XzWfPIxcJJtgLjR17mK61qFFeSQ7qE6JQHAI4Kf44YcfsIdEcKySJSIgAiIgAikIREe4E8HCQ6XEn7DOulXVpuf9+vUjKj0ZBQT9zJkzr7zyymnTpvXs2XPkyJHmWdXJkydbL/vll1/Og1z85V1LrVq1on4ehDUVEkb//PPPEwlzzTXXdOrUCel/1FFHJWtL+akJzFm0fMnymIZwL7ie+hDtFQEREAEREAEREIEiIRAd4Y7Dm6dR8ZHHjxyB7KldSsTGxIfH8ECqrYr7yCw3ycfmuBP713zcOUpnRsC423kHU8OyWDCSPiIgAiIgAiIgAiIgApZAdIS7WePFdkyJMBJYGSez6v2mYeyFbBYBERABERABERABPwiseuDSj9rzWSdrt998883uFm+55Rbei+TOUTrgBIxwb68A94CPk8wTAREQAREQAREoBIHoCPf3339/3333dTPcZ599yHTnKB1wAlPnxV6bKuEe8GGSeSIgAiIgAiIgAgUhEB3hvnDhQtZ2dENkgSEtDuoGEvz0lJolZSTcgz9SslAEREAEREAERCD/BKIT486qMsOHD2dxGAvxySefdL8ayeYrEVgCinEP7NDIsOgRwNOR7IH76HVWPRIBERCBaBCIjnDn4dRDDjnkp59+2nXXXRkb3qM0bNiwp59+OhrjVCS9UIx7kQy0uikCIiACIiACIpABgegId9ZWHzFiBC9CeuaZZ8rLyzfbbLO33nprp512ygCKDikIgWWVVTMWLKVphcoUhL8aFQEREAEREAERCDiB6Ah3QO9X8wk4cZmXjMD0+Uuqq50GZaVrr7HaswrJyitfBEQgGwKVlZW8PI4aDj74YF5VkU1VOlYEREAERCA/BKLzcGp+eKkV/wj8GSfTvBEvqfWvFdUsAiJgCPBaugk1n9TvpxMuERABERCB4BCIjpdlxYoV//jHP5566qnJkycvW7bMIp49e7ZNKxFkAlPnLcY8xckEeYxkmwiIgAiIgAiIQAEJRMfjPnjw4Ntvv71v377z5s0799xzeVC1tLT06quvLiBcNV0nAlPnahH3OgFTYREQAREQAREQgeIiEB3h/vjjjz/wwAPnnXcewZr9+vV78MEHWRryo48+Kq7xDHNvtYh7mEdPtouACIiACIiACPhOIDrCfdq0aSzlDrAmTZrgdCex//77v/LKK74jVAM5IqBF3HMEUtWIgAiIgAiIgAhEk0B0hPs666xTUVHBKHXu3PmNN94g8fHHHzds2DCa4xbFXmkR9yiOqvokAiIgAiIgAiKQMwLREe6saMZLlwBzxhln8DKmLl269O/f/7jjjssZKlX
kM4EKxbj7TFjVi4AIiIAIiIAIhJpAdFaVuemmm8xI8Hxqx44dR48ejXbnrUyhHp7iMX7+kuULllbS3/bNy4un1+qpCBSQQP369S+55BIMIFFAM9S0CIiACIhA+gQiItyXL19+0kkn4Wjv1KkTnd+u5pM+BZUsOAETJ7Nm4/rlDeoV3BgZIALFQIAXJjRooJedFcNQq48iIALRIRCRUBk8Rs8++2x0hqX4eqIA9+Ibc/VYBERABERABESgbgQiItzp9EEHHTRixIi69V6lA0NgigLcAzMWMqRICFRWVvKdyYdEkXRZ3RQBERCBsBOISKgMw0BE+zXXXPPhhx9uueWWa6yxhh2YM88806aVCCyBlWtBKsA9sEMkw6JGoKqq6ssvv6RX++67b9T6pv6IgAiIQEQJREe4Dx06tEWLFp/WfOxgEcQp4W5pBDmxMlSmUZCNlG0iIAIiIAIiIAIiUEAC0RHuEydOLCBHNZ0lgZXCXR73LEHqcBEQAREQAREQgcgSiE6Me2SHqDg6NlUx7sUx0OqlCIiACIiACIhAxgSi43FP9q6lhx56KGM6OjA/BFZUVU+bv4S2tIh7foCrFREQAREQAREQgTASiI5wnzNnjh0AlnUfP3783Llzd911V5upRGAJzFiwBO1eVlrSqmnDwBopw0RABERABERABESgsASiI9yff/55N0oWTDjllFM6d+7szlQ6mARMgHvb5o3qlZYE00JZJQIiIAIiIAIiIAIFJxAd4e5BWVpaeu655+68884XXnihZ5c2g0ZAi7gHbURkTzEQ4L11559/Pj0lUQz9VR9FQAREIAIEIivcGZuffvpJLxYJxTmqRdxDMUwyMmIEWC3X/cqLiPVO3REBERCBSBKIjnDHv25HqLq6uqKi4pVXXhkwYIDNVCKwBFauBalF3AM7RDJMBERABERABESg8ASiI9w///xzi5M4mVatWt12223JlpqxJZUIAoGVwl2LuAdhNGRDsRDghuTrr79Ob/faa6+ysuj8FhTL+KmfIiACRUkgOl/W7777blGOYBQ6rUXcozCK6kPYCPAE/yeffILVe+yxR9hsl70iIAIiUKQEovMCJt6c+sMPP7iHkc1ffvnFnaN0MAlMnbcYw7SIezBHR1aJgAiIgAiIgAgEhEB0hPvAgQNHjx7txjp27Fgy3TlKB5DAH0sr5y5ajmHtWyjGPYDjI5NEQAREQAREQASCQiA6wp0Y9969e7u5brfddl988YU7R+kAEqiocbc3bVTWtJHWpAvg+MgkERABERABERCBoBCIjnBnabMFCxa4uc6bN2/FihXuHKUDSMAs4t6hhZ5MDeDgyCQREAEREAEREIEAEYiOcN9xxx1vvPFGq9RJsNmnT58AwZYpiQhoSZlEVJQnAiIgAiIgAiIgAl4C0VlV5uabb0a7d+3adYcddqCXo0aNmj9//jvvvOPtsbYDRmClcFeAe8AGRuaIgAiIgAiIgAgEjEB0hHu3bt2++uqru++++8svvywvL+/fv//pp5++1lprBQy4zPESmDK3ZkkZhcp4wWhbBPwlUL9+/bPOOos2SPjbkmoXAREQARHIEYHoCHeAtG/f/oYbbsgRGVWTJwIVc5fQkmLc84RbzYjASgI8F9SiRYuVW/pfBERABEQgBASiE+P+8MMPP/30027kbD766KPuHKUDSMAs4t6uuR5ODeDgyCQREAEREAEREIEAEYiOcOdR1JYtW7rRtm7dWg54N5AApquqqo3HXYu4B3B0ZFK0CfAE/xs1H/tMf7T7q96JgAiIQAQIREe4T548uVOnTu4h6dixI5nuHKWDRmDWH0uXragqLXHaNNPDqUEbHNkTcQLo9TE1Hwn3iI+0uicCIhAhAtER7vjXeTjVPTQ8pbr22mu7c5QOGoGpNQHuqPb69aJzKgYNsuwRAREQAREQARGIBoHoqKV+/fqdeeaZ7777Lt4jPiwEyYIJRxxxRDTGKaq9WLkWpALcozrC6pcIiIAIiIAIiEDOCERnVZlrr732l19+2W233crKYp2qqqpiRcjrr78+Z6hUkQ8EJNx9gKoqRUAEREAEREAEokkgOsK9QYMGw4cPv+6667744gvWce/evTsx7tEctAj1auUi7gpwj9CgqisiIAIiIAIiIAL+EIiOcDd8utR8SPPa1HvvvXfo0KGffPKJP+hUaw4IaBH3HEBUFSIgAiIgAiIgAsVBIGrCnVEjzP2hhx567rnnmjdvfvDBBxfHOIa1l1rEPawjJ7tFQAREQAREQATyTiA6wn3KlCmPPPIIr2GaO3funDlznnjiicMPP5xXA+YdqRqsA4GVMe4KlakDNBUVgZwQqF+//imnnEJVJHJSoSoRAREQARHwm0AUVpV59tln9913365duxLdftttt02dOrW0tJQYd6l2v8+eLOtfsnzFrIXLqKRDC60qkyVLHS4CdSbANySr6PLRV2Wd2ekAERABESgQgSh43Pv27XvRRRfxZGrTpk0LhFHNZkKgYt4SDmvcoF7zcjn8MgGoY0RABERABERABIqKQBQ87scff/yQIUP23nvv++67jyCZohq/UHd2ZZxMuRx+oR5HGR9SArzv4r2aD4mQdkFmi4AIiECxEYiCcL///vsrKipOPPHEYcOGtWvX7q9//Wt1dTXruBfbWIauvyvXglScTOiGTgZHgQB6/b81Hwn3KAyn+iACIlAcBKIg3BkpFm4fMGAAv0Fff/31Jpts0qZNm969ex955JGsLVMc4xjKXhqPe4cWejI1lMMno0VABERABERABPJMICLC3VJjGfcbbrjh119/feyxxxYtWtSvXz+7S4mgETCLuLdrLo970EZG9oiACIiACIiACASRQBQeTo3nyqoyB9R8ZsyYEb9XOQEhYBZxb68lZQIyHjJDBERABERABEQg2ASi5nH30GalM0+ONoNDYGWMu0JlgjMmskQEREAEREAERCC4BCIu3IMLvugt4wHilTHuCpUp+rNBAERABERABERABNIgIOGeBiQV8YHAnEXLlyyPrfzTtrk87j7wVZUiIAIiIAIiIAKRIxDNGPfIDVMEO2Tc7a2aNmxYVi+C3VOXRCDwBMrKyk444QTMJBF4Y2WgCIiACIhAjEB0PO7rr7/+77//7h7VuXPnkunOUTo4BFYGuCtOJjhjIkuKiwAP8Xeo+ZAorp6rtyIgAiIQWgLR+b7+5ZdfPK8RWbp06ZQpU0I7NBE3vGLuYnqoRdwjPszqngiIgAiIgAiIQO4IROEO6YsvvmiAvP76682bNzdpRPzbb7+93nrr5Y6VasolganzllCdFnHPJVPVJQJ1IcCX5EcffcQR2223Xb16ilirCzuVFQEREIECEYiCcD/ooIOgV1JSwstTLcb69euj2m+77Tabo0SgCChUJlDDIWOKkADC/a233qLjW2+9tYR7EZ4A6rIIiEAYCURBuFdVxRYn6dSp08cff9yyZcswDkMR2rxyLUgtKVOEg68ui4AIiIAIiIAIZEIgOjHuEydOdKt2nkxNn8eQIUNwzzdq1GjbbbcdN25cwgPvuOOOrl
27lpeXr7vuuuecc86SJbFIDz5XX301zn772WijjUy+/qYmYIS7XpuampL2ioAIiIAIiIAIiIAlEB3hfvPNNw8fPtx07LDDDltrrf+3d+dhUlRnvMeZlRm2GVRkF9wVZRMEEYwmsoiGgCsKV8QoBJFHkAcTQIEgAsYFd8RrULxBBTRREx8XiAkaAwSFYFDBB9xQGDYJ2wzDrPfHFFNpexZm6eWcU9/+Q6urq0695/N2D++cOXX6OK2X8Mknn/hdrWhDZ40fP37atGlr167t2LFjv379du7cGXbwSy+9NHHiRB2zYcOG+fPn65TJkyf7x5xzzjlZpY8PP/zQ389GRQJ5BUU7DxzWqxTuFRGxHwEEEEAAAQQQCBNwp3CfN2+exsLVvWXLlmni5jvvvNO/f/+77rorrMNln86ZM2fEiBE333xzu3bt1Ei9evWee+65sMNWrFjRs2fPIUOGaGC+b9++N9xwQ+jAvFZBblb6CB31D2uEp77Ajv25xcV1UpMTj6+f6u9kAwEEEEAAAQQQQKASARfmuHvd2759u1e4v/nmm9ddd53KaxXZmvpSSef1Ul5e3po1ayZNmuQdpvWMe/fuvXLlyrCzLrzwwoULF6pY79at21dfffXWW2/deOON/jGbNm1q0aKFZtr06NFj9uzZJ510kv+Sv6G1KfXwnu7fv18b+SUPb8P/r3eA8//9dvcB9bFFRlpBQYFLnVVK1R3vvy71K4B9CUIq/TeqNjTZz9UsByGVruYurF+kMgzE3qdhqfSe2tudGEfuTuHeuHHj7777TrW7xtrvu+8+ORYXF4et7F4Wd/fu3TqmadOm/kva3rhxo//U29BYu47s1auX2lStOWrUKH+qjH43WLBggaa/a7LM9OnTL7rook8//bRhw4ZhLaig16uhO5cuXarRfX+P/lDgbzu/8dEuVQlJqQUH9SuQe50NVCrdS19oj9xOpf/jUQvpOr+qjNupDH3TOr9NKp1JsZ/KnJwcZzoVg464U7hfddVVKq9PP/10fX+qJsnI7t///vdpp50WEcTly5fPmjVr7ty5KtM3b948duzYGTNmTJkyRY1719JGhw4d9GqbNm2WLFlyyy23hF1Xg/qaSe/t1Ii7fsHQ3wQaNWqkPfpdU2/fPn36aAnLsLNcffrt+1/V2bz53FNaXX75uS71MYCpdCl9oX0JQiq1Hpd+aqnX+nHk8JenBiGVoW9dh7dJpTPJDUulNw3Bmd5FuyPuFO6PPPKI5sZo0P2BBx5o0KCB4DQEPnr06MoFNSVdQ007duzwD9O25qv7T70N1eiaG3Prrbfqafv27bOzs0eOHHn33XeH/WuXmZl5xhlnqLIPO11P65Y8QverTA+t1MOehh7p3vb2A3nqVKvj6ocKONPNQKXSmayV2xHnUxmpoY1y9Yza6XwqjdKOajCkMqq8sWzcT6U2Ynld26/lTuGuxE+YMCE0H1q0MfRpudupqaldunTRd6x63+KkIShtjxkzJuxg/R0ntEb3/qysaTNhhx08ePDLL78Mnf4edgBPPQEWceedgAACCCCAAAIIVFfAnVVl1PM//OEPmoau+0S//fZbPdXK62+88cYxRTSD5dlnn33hhRe01ONtt92m0XStMKOzhg0b5t+0OmDAgKeffnrRokVaLV7TWjQArz1e+a7fFt5///1vvvlGK89ceeWV2qk1Z4550YAfwCLuAX8D0H0TBDTHXTfc6+FPdjchKmJAAAEEEKhEwJ0RdxXWU6dOHTdu3MyZM71/hzRxRbX7wIEDK+m/Xho8ePCuXbt0rtal6dSpk+5t9e5V3bJliz/Kfs8992jVBf1369atTZo0UdWuq3jNfv/996rUNbFe+/Vrw6pVq7RR+RUD/qr+UrH1v4eEwCLuAX8n0P34Cujn5Ntvv60Y9HPPG4aIbzxcHQEEEEDgmALuFO5PPPGEBs414+X+++/3ut21a9ewyTMVcWhuTNnpMboh1T9eK7Xr25f08Pf4GxqG97fZqIrA/tyC7LxCHdkiI70qx3MMAggggAACCCCAgATcmSqjSSydO3cOTapuB9W8l9A9bJsg4M2TOa5+anpqkgnxEAMCCCCAAAIIIGCFgDuF+8knn7xu3bpQdE16Ofvss0P3sG2CQNY+b55MmgnBEAMCCCCAAAIIIGCLgAtTZe69915NidE9prfffntubq6mUOt2q5dfflnfefT73//elkwEJ86te3PV2ebMkwlOyukpAggggAACCERCwIXCXd9Iqq8y1SLr6enpun9USzfqm5i0tsxjjz12/fXXR0KJNiIpULoWJBPcI6lKWwgggAACCCDgvIALhbu/nvrQkocKd62nfuKJJzqfPEs7WLoWJFNlLE0gYSOAAAIIIIBAfARcKNwlp7Uafb96JQ//KRumCZQW7oy4m5YZ4gmWgBbL8r50QhvB6jm9RQABBKwVcOTn9RlnnBFau4emY8+ePaFP2Y67wLaSOe4s4h73RBBAwAX0PRX6yRlwBLqPAAII2CXgSOGuae4ZGRl20Qcz2oLCou37j9yc2jKTEfdgvgXoNQIIIIAAAgjUUMCRwl03oTKpvYZvgdietvPA4cKi4pSkhCYN6sb2ylwNAQR+JKBvTl2/fr12tW/fnm9O/RENTxBAAAFTBVwo3CuaJGOqeaDj8hZxb5aRlpj4v9sSAi1C5xGIk4AK9zfeeEMXb9euHYV7nJLAZRFAAIHqCbjwBUz+qjLV6zpHx0OARdzjoc41EUAAAQQQQMAFARdG3IuKilxIRTD6wCLuwcgzvUQAAQQQQACByAu4MOIeeRVajJpA6VqQLOIeNWIaRgABBBBAAAFHBSjcHU2sqd0qLdxZUsbUDBEXAggggAACCJgqQOFuamYcjcub484i7o6ml24hgAACCCCAQBQFKNyjiEvTZQWY417WhD0IIIAAAggggEBVBFy4ObUq/eQYEwSyDxfsO5SvSJpnMMfdhIQQQ6AFkpOTr7nmGhFoI9AQdB4BBBCwR4Cf1/bkyv5IvUXcG6UlN0xLsb839AABuwUSExPPOeccu/tA9AgggEDABJgqE7CEx7W7THCPKz8XRwABBBBAAAG7BRhxtzt/dkXPkjJ25Yto3RbQN2Bs2LBBfTz77LM1+u52Z+kdAggg4IYAP6zdyKMdvSgt3Jngbke+iNJtgYKCgldLHtpwu6f0DgEEEHBGgMLdmVRa0JGtew8pStaCtCBVhIgAAggggAAC5glQuJuXE3cjYi1Id3NLzxBAAAEEEEAg6gIU7lEn5gK+wLa9udpmxN0HYQMBBBBAAAEEEKi6AIV71a04slYCRUXF3nKQFO61cuRkBBBAAAEEEAiqAIV7UDMf837vzj6cX1icmFCnacO6Mb84F0QAAQQQQAABBKwXoHC3PoW2dMCbJ9NUX7+UxLvOlqQRJwIIIIAAAggYJMA67gYlw+1QSteCTHe7m/QOAVsEkpKSBg4cqGi1YUvMxIkAAggEXIDCPeBvgNh1n8I9dtZcCYEqCKhe79SpUxUO5BAEEEAAAVMEmLRgSiacj6N0EXe+fcn5VNNBBBBAA
AEEEIiKACPuUWGl0bICLOJe1oQ9CMRRoKioaPPmzQrgtNNOS0xkECeOqeDSCCCAQFUF+GFdVSmOq6XA0UXcM5jjXktITkcgMgIFBQUvlzy0EZkWaQUBBBBAIMoCFO5RBqb5UgHmuJdK8H8EEEAAAQQQQKAmAhTuNVHjnOoK5OYX/pCdp7NaZDLHvbp4HI8AAggggAACCBwRoHDnfRALgax9ubpMvdSkjPSUWFyPayCAAAIIIIAAAs4JULg7l1IjO+TPk0lISDAyQIJCAAEEEEAAAQRMF6BwNz1DbsRXuhYkd6a6kU96gQACCCCAAAJxEKBwjwN6AC9ZuhYkE9wDmHy6jAACCCCAAAKREWAd98g40krlAkenyrAWZOVMvIpADAX0zan9+/fXBbURw8tyKQQQQACBmgtQuNfcjjOrLnB0EfdMpspU3YwjEYiugOr1bt26RfcatI4AAgggEFEBpspElJPGKhDYtu+QXmlB4V6BD7sRQAABBBBAAIFjCjDifkwiDqitQHFxcemqMsxxry0m5yMQKYGioqItW7aotZNOOikxkUGcSLnSDgIIIBBFAX5YRxGXpj2B/+bk5+YXabtZBoU7bwoETBEoKCh4oeShDVNiIg4EEEAAgUoFKNwr5eHFSAh4w+1NGtatm8w9cJEApQ0EEEAAAQQQCKQAhXsg0x7bTrOIe2y9uRoCCCCAAAIIuClA4e5mXo3qFYu4G5UOgkEAAQQQQAABSwUo3C1NnE1hs4i7TdkiVgQQQAABBBAwVYDC3dTMOBQXi7g7lEy6ggACCCCAAAJxE6Bwjxt9cC7MIu7ByTU9RQABBBBAAIHoCbCOe/RsafmoAIu481ZAwEABfXNq7969FZg2DAyPkBBAAAEEygpQuJc1YU8kBfIKinYeOKwW+drUSLLSFgK1FlC93rNnz1o3QwMIIIAAArETYKpM7KyDeaUd+3OLi+ukJiceXz81mAL0GgEEEEAAAQQQiIgAI+4RYaSRCgW8RdxbZqYnJCRUeBAvIIBAzAWKioqysrJ02ebNmycmMogT8wRwQQQQQKD6Avywrr4ZZ1RHgAnu1dHiWARiJ1BQUPD7koc2YndVroQAAgggUAsBCvda4HFqFQRYxL0KSByCAAIIIIAAAggcW4DC/dhGHFEbga17c3U6d6bWxpBzEUAAAQQQQAABCVC48zaIrkDWvkO6gOa4R/cytI4AAggggAACCLguQOHueobj3T9vqkzzzLR4B8L1EUAAAQQQQAABuwUo3O3On+HRFxcXb/3vkRF3psoYninCQwABBBBAAAHzBSjczc+RxRHuzy3IzitUB1pkMFXG4jwSOgIIIIAAAgiYIMA67iZkwdkYvHkyx9VPTU/lO9WdzTIds1RA35x68cUXK3htWNoFwkYAAQSCJkDhHrSMx7S/LOIeU24uhkB1BFSvX3LJJdU5g2MRQAABBOIswFSZOCfA7cuziLvb+aV3CCCAAAIIIBBLAUbcY6kduGuxiHvgUk6H7RHQveO7du1SvE2aNElISLAncCJFAAEEgivAiHtwcx+DnrOIewyQuQQCNRPIz89/uuShjZq1wFkIIIAAAjEWoHA/Av7UU0+1bds2LS2te/fuq1evLjcHjz766Jlnnpment66des777wzN/fIF4KGPu6//36NWo0bNy50Z8C3WcQ94G8Auo8AAggggAACERSgcK+zePHi8ePHT5s2be3atR07duzXr9/OnTvDiF966aWJEyfqmA0bNsyfP1+nTJ48OfSYjz766JlnnunQoUPoTra37T3y6w2LuPNOQAABBBBAAAEEai9A4eHP9oEAACUdSURBVF5nzpw5I0aMuPnmm9u1azdv3rx69eo999xzYbIrVqzo2bPnkCFDNDDft2/fG264IXRg/uDBg0OHDn322WcbN24cdmKQnxYUFm3ff6Rwb5nJIu5BfiPQdwQQQAABBBCIjEDQb07Ny8tbs2bNpEmTPM7ExMTevXuvXLkyTPfCCy9cuHChivVu3bp99dVXb7311o033ugfc/vtt19xxRU68b777vN3hm0cLnl4O/fv368NzSv1ppaG/jfsLKufZu3LLSwqTklKyKyb6PXR6u5UJXhXU1mVvjt2TBBS6X8qteHwzalBSKVjn76KukMqK5Kxbn9YKr2n1vUiXgEHvXDfvXt3YWFh06ZN/QRoe+PGjf5Tb0Nj7TqyV69eWoehoKBg1KhR/lSZRYsWaY6NpsqEnRL2dPbs2dOnTw/duXTpUo3u+3uWLVvmb7ux8dWRX0+SGyUXvfPO2270qIq9cC+VVey4e4e5nUr96PNS9u677zr/HUxup9K9j14lPSKVleDY9ZKfypycHLsij2+0QS/cq6i/fPnyWbNmzZ07V3evbt68eezYsTNmzJgyZcp3332nbb35dGNr5U1pUF8z6b1jNOKuO1w15aZRo0bao9811UKfPn1SUlIqb8SuV//yn6w6n60/tflxl19+vl2R1zhaV1NZYxB7TwxCKvX3xvXr1ytHurEnNTXV3mRVHnkQUlm5gDOvkkpXU+lNQ3Cmd9HuSNAL9xNOOEFDTTt27PChtd2sWTP/qbehGl1zY2699VY9bd++fXZ29siRI++++25Ns9GdrOedd553mEawPvjggyeffFLzYsJGsOqWPEKbVZkeWqmHPQ090tLtHQeOrDHXqnG90G5a2pdqhe1eKqvVfZcOdjuVmhnYo0cP5UvjDmE/r1xKotcXt1PpXr4q6RGprATHrpf8VGrDrsjjG23QC3eNM3Xp0uW9994bNGiQMlFUVKTtMWPGhGVFf8fRP3L+Tu8fOU2bufTSS70hK+8l3eF61lln/eY3v3H+X0GfopINbxF3lpSphIiXEIijgH5M6e9+cQyASyOAAAIIVFcg6IW7vDSD5aabburatatuPNVi7RpNV/2t/cOGDWvZsqXmpmt7wIABWnymc+fO3lQZDcBrj/7Za9iw4bnnnuuj169f//jjjw/d478UwA0WcQ9g0ukyAggggAACCERPgMK9zuDBg/W931OnTt2+fXunTp3eeecd717VLVu2+KPs99xzj1Zd0H+3bt2qrwdX1T5z5szoZcWNlreyiLsbiaQXjgrob4b79u1T5zIyMhxeVcbR7NEtBBAIqACF+5HEa25M2ekxuiHVf1MkJyfr25f08PeUuxF6SrkHBGqnN+LOIu6BSjqdtUhAt/o99thjCli3zjt8c6pFGSFUBBBA4JgC/5u3fcxDOQCBqgscPFyw79CRm1ObZxxjvZ2qt8mRCCCAAAIIIIBAkAUo3IOc/Sj2PWvvIbXeKC25YRp3i0fRmaYRQAABBBBAIDgCFO7ByXVMe7q1pHBnSZmYonMxBBBAAAEEEHBagMLd6fTGr3PbSu5MZYJ7/DLAlRFAAAEEEEDANQEKd9cyakh/vEXcm2cywd2QhBAGAggggAACCFgvQOFufQrN7ABTZczMC1EhgAACCCCAgL0CLAdpb+6Mjpy1II1OD8EhUKeOvqdCXzwnCf8LK1BBAAEEEDBcgMLd8ATZGp43x52bU23NH3EHQEBf
T3HFFVcEoKN0EQEEEHBHgKky7uTSnJ4UFRV7c9wp3M1JCpEggAACCCCAgO0CjLjbnkET49998HB+YXFiQp2mDeuaGB8xIYBAnTrFxcU5OTmSqFevXkJCAiQIIIAAAuYLMOJufo7si9C7M7WZvn4piTeYfekj4oAI5OfnP1Ty0EZAukw3EUAAAdsFqKtsz6CJ8Wfty1VYzJMxMTfEhAACCCCAAALWClC4W5s6gwP3lpRpnplucIyEhgACCCCAAAIIWCZA4W5ZwqwIt3QRd759yYp0ESQCCCCAAAII2CFA4W5HnuyKkkXc7coX0SKAAAIIIICAFQIU7lakybIgjy7insFUGcsSR7gIIIAAAgggYLIAhbvJ2bE1Nm/EnZtTbc0fcSOAAAIIIICAkQKs425kWmwOKje/8IfsPPWgJTen2pxHYndeIDExsWPHjuqmNpzvLB1EAAEE3BCgcHcjjwb1whtur5+a1Cidd5dBeSEUBMIEkpOTBw0aFLaTpwgggAACJgsw0GJydqyMzV/Ene9itDJ/BI0AAggggAACpgowJmpqZqyNy1sLkkXcrU0ggQdFoLi42PvO1JSUFH7NDkrW6ScCCFguwIi75Qk0L/zStSBZxN283BARAiECqtpnlzy88j3kFTYRQAABBAwVoHA3NDH2hnV0SRnWgrQ3hUSOAAIIIIAAAkYKULgbmRabgzq6iDtLyticRGJHAAEEEEAAAQMFKNwNTIrdIbGIu935I3oEEEAAAQQQMFWAwt3UzNgZl253825OZRF3OxNI1AgggAACCCBgrgCFu7m5sTGyPdl5hwuKEhLqNM2oa2P8xIwAAggggAACCBgrQOFubGqsDMxbxL1Jg7p1k5Os7ABBI4AAAggggAACpgqwjrupmbEzLhZxtzNvRB1EgcTExHbt2qnn2ghi/+kzAgggYKEAhbuFSTM4ZBZxNzg5hIbAjwSSk5OvvfbaH+3iCQIIIICA2QIMtJidH9uiYxF32zJGvAgggAACCCBgjQCFuzWpsiJQFnG3Ik0EiQACCCCAAAI2CjBVxsasmRuzN8e9Bd++ZG6KiAyBowJ5eXmzZ8/Wk0mTJqWmpuKCAAIIIGC+ACPu5ufIpghL57in2xQ0sSKAAAIIIIAAAjYIULjbkCVLYswrKNp18LCCbZGZZknIhIkAAggggAACCFgjQOFuTarMD3TH/tzi4jp1kxOPq8+f3c1PFxEigAACCCCAgGUCFO6WJczkcP0J7gn66lQeCCCAAAIIIIAAAhEVoHCPKGewGzu6FiTzZIL9NqD3CCCAAAIIIBAlAQr3KMEGsVkWcQ9i1ukzAggggAACCMRKgOUgYyUdgOts3ZurXrIWZABSTRddEEhMTDz99NPVE2240B/6gAACCARAgMI9AEmOVRdZCzJW0lwHgQgIJCcnDxkyJAIN0QQCCCCAQKwEGGiJlXQArlM6x51F3AOQbLqIAAIIIIAAAjEXoHCPObmjFywuLi4t3FnE3dEc0y0EEEAAAQQQiKsAU2Xiyu/QxffnFmTnFapDzHF3KKt0xWWBvLy8hx56SD2cMGFCairfveByrukbAgg4I0Dh7kwq49wRb7hdX72UlpIU51C4PAIIVE0gPz+/agdyFAIIIICAEQJMlTEiDQ4EwTwZB5JIFxBAAAEEEEDAZAEKd5OzY1NsRwv3DO5MtSlrxIoAAggggAACFglQuFuULKNDZRF3o9NDcAgggAACCCBgvwCFu/05NKMH3oh7y0xG3M3IB1EggAACCCCAgHMCFO7OpTROHSqd407hHqcEcFkEEEAAAQQQcF2AVWVcz3Cs+ldauLOIe6zEuQ4CtRNISEho06aN2tBG7VribAQQQACBGAlQuMcI2u3LFBQW7ThwWH1kEXe3E03vXBJISUkZPny4Sz2iLwgggIDzAkyVcT7FsejgzgOHC4uKU5ISmjSoG4vrcQ0EEEAAAQQQQCB4AhTuwct5FHrszZNplpGWmMjf3KPgS5MIIIAAAggggECdOkyV4V0QAYGtew+plRYs4h4BS5pAIEYCeXl5jz32mC42duzY1NTUGF2VyyCAAAII1EKAwr0WeJxaKrBtb642WQuy1IP/I2CHQE5Ojh2BEiUCCCCAQIkAU2V4I0RAoHRJGdaCjAAmTSCAAAIIIIAAAuUKULiXy8LO6glQuFfPi6MRQAABBBBAAIHqC1C4V9+MM8oIHJ3jnski7mVo2IEAAggggAACCERIgMI9QpDBbiZr35E57iziHux3Ab1HAAEEEEAAgegKULhH1zcIrR88XLDvUL562jyDEfcgJJw+IoAAAggggEB8BFhVJj7uLl01q2QtyEZpyQ3TUlzqF31BwG2BhISEFi1aqI/acLun9A4BBBBwRoDC3ZlUxq0jpRPcWVImbingwgjUQCAlJWXEiBE1OJFTEEAAAQTiJcBUmXjJu3NdFnF3J5f0BAEEEEAAAQQMFqBwP5Kcp556qm3btmlpad27d1+9enW5+Xr00UfPPPPM9PT01q1b33nnnbm5R27H1OPpp5/u0KFDo5JHjx493n77bW9/cP7LWpDByTU9RQABBBBAAIE4ClC411m8ePH48eOnTZu2du3ajh079uvXb+fOnWEpeemllyZOnKhjNmzYMH/+fJ0yefJk75hWrVrdf//9a9as+fjjj3/2s58NHDjws88+Czvd7acU7m7nl965KpCfn6/xCD204Wof6RcCCCDgmACFe505c+ZooufNN9/crl27efPm1atX77nnngtL84oVK3r27DlkyBANzPft2/eGG27wB+YHDBhw+eWXn3766WecccbMmTMbNGiwatWqsNPdfrpt3yF1sAWLuLudZnrnnEBxcfG+koc2nOscHUIAAQTcFAh64Z6Xl6fB8t69e3vpTUxM1PbKlSvDsn3hhRfqMK9Y/+qrr9566y0V62HHFBYWLlq0KDs7WxNmwl5y+6k3x51F3N3OMr1DAAEEEEAAgbgLBH1Vmd27d6vgbtq0qZ8JbW/cuNF/6m1orF1H9urVS0NTBQUFo0aN8qfK6ID169erWNesdw23v/baaxq5DztdTw+XPLz9+/fv14b+PO39hTr0v2VPNHxPUVFxVsmI+4n1k72OGB5wVMOzOpVRlbGu8SCk0v/AasPhFSGDkErrPl81C5hU1szNwLPCUuk9NTBOM0MKeuFexawsX7581qxZc+fO1d2rmzdvHjt27IwZM6ZMmeKdrptW161bp785v/rqqzfddNP7779ftnafPXv29OnTQy+3dOlSTcvx9yxbtszftmhjX16d/MLkhDrFa/7593UsBl2SOUtTadG7Lmahup1KjVl4ku+++25SUlLMVONyIbdTGRfSeF2UVMZLPuLX9VOZk5MT8cYdbjAh4LMbNVVG1bMK7kGDBnlpVuW9d+/eN954IzTrF1100QUXXPDggw96OxcuXDhy5MiDBw9qak3oYdrWTJtTTz31mWeeCdsfNuKupWk0hK+laHSYftfU27dPnz5aVjnsLPOfrvtu77X/d7W+M/WDCT8xP9poR2h1KqONY1f7QUilfvo99NB
DysuECRNSU1PtSlDVow1CKquuYfWRpNLq9IUGH5ZKTUM44YQTNPrpFUWhR7JdViDoI+7656pLly7vvfeeV7gXFRVpe8yYMWFS+nUwtEb3RqfK/Z1HLahGDztdT+uWPEL3q0wPrdTDnoYeafL2zoMFCq9lZnpoX0wOOAaxWZrKGMhYdwm3U+n/BHO7m967Lgh9tO7zVbOASWXN3Aw8y0+lNgwMz9iQgl64KzFaC1Kj7F27du3WrZtWRtPdpVphRvuHDRvWsmVLTXHRtpaO0eIznTt39qbKaJKM9njl+6RJk/r373/SSScdOHBAq0ZqUo3+7mxsviMeGGtBRpyUBhGIjYDmtTdp0kTXcniCe2wkuQoCCCAQMwEK9zqDBw/etWvX1KlTt2/f3qlTp3feece7V3XLli3+KPs999yjf9v0361bt+qfOlXtWvnRS5IWfVeJn5WVlZGRoW9iUtWuSS8xy1/cL7R1r7cWZHrcIyEABBColoBGuUaPHl2tUzgYAQQQQCC+AhTuR/w1N6bs9BiNnfu5SU5O1rcv6eHv8Tf0fUz+dgA3vCVlWrKIewBzT5cRQAABBBBAILYC4fdWxvbqXM16AW8R9+YZjLhbn0o6gAACCCCAAAKGCzDibniCTA+POe6mZ4j4EKhAQAs7PPvss3pRXx3NzWEVILEbAQQQMEuAwt2sfNgVTW5+4Q/ZeYpZq8rYFTnRIoCAVpXR7T1y8JeXwQQBBBBAwHABpsoYniCjw/OG2+unJjVK5zdAozNFcAgggAACCCDggACFuwNJjFsXvAnuLTLTWU4ubjngwggggAACCCAQGAEK98CkOgodZYJ7FFBpEgEEEEAAAQQQKF+Awr18F/ZWRYBF3KuixDEIIIAAAggggEBEBCjcI8IY0EZYxD2giafbCCCAAAIIIBAPAe4pjIe6K9dkEXdXMkk/giigW1P0fc/qOfeoBDH99BkBBOwUoHC3M29mRM0cdzPyQBQI1ERAa7ePGzeuJmdyDgIIIIBAnASYKhMnePsvq7WfvTnuLOJufzLpAQIIIIAAAghYIEDhbkGSzAxxT3be4YKihIQ6TTPqmhkhUSGAAAIIIIAAAi4JMFXGpWzGtC/eBPcmDerWTU6K6YW5GAIIREIgPz9/wYIFamn48OGaNhOJJmkDAQQQQCC6AhTu0fV1uHXWgnQ4uXQtCAKa7bZt2zb1VBtB6C99RAABBBwQYKqMA0mMTxe8O1OZ4B4ffa6KAAIIIIAAAsEToHAPXs4j1GNvEfcWmWkRao9mEEAAAQQQQAABBCoTYKpMZTpWv1ZYVLz66z07D+Se2DCt28nHJSUmRLA7anz99/vU4OH8Im1HtvEIxklTCCCAAAIIIICAMwIU7s6k8kcdeefTrOl/+TxrX663t3lG2rQB7S47t/mPDqrpk9DG/9+qb5dt2BHBxmsaFOchgAACCCCAAAKOCzBVxsEEq7C+beFav2pXD7fvy9Ue7a99b6PaeO3DowUEEEAAAQQQQMBVAUbcXcusJq5orD1skQg91USZ3/75856nnVCbaS1qfNqfPyu3cV20T7tmtWnctUzQHwSMF6hXr57xMRIgAggggMD/BCjc/2fhxpbmtYeOtfudUrW9fX9u+98u9fdEcEON66K6dI9Tj49gszSFAALRE0hNTb3rrrui1z4tI4AAAghEXICpMhEnjXODuhs1XhHE8dLx6jLXRQABBBBAAAEEYibAiHvMqGN0Ia0hU8mVFtx8vlaYqeSAyl/SmPrw5z+q6JjKL13RWexHAAEEEEAAAQQQqIoAhXtVlGw6RnW51pDR3ahhM9E1x71ZRtpFpzepzTR0nV5J47X5lcAmYmJFwAmB/Pz8F198UV0ZOnRoSkqKE32iEwgggIDjAkyVcS3Bqsu1OKN6Fbpsu7et/bWp2tVmVBt3LRP0BwGzBYqLi78teWjD7EiJDgEEEEDgqACFu4NvBa3X/vT/OU/j637ftK09EVnHPaqN+wGzgQACCCCAAAIIIBAmwFSZMBBHnqq81uKMUfrm1Kg27kgC6AYCCCCAAAIIIBBpAQr3SIsa056mtURvccaoNm4MIYEggAACCCCAAAIGCTBVxqBkEAoCCCCAAAIIIIAAAhUJULhXJMN+BBBAAAEEEEAAAQQMEmCqjEHJIBQEEEAglgKsAhlLba6FAAII1F6Awr32hrSAAAII2CeQmpo6efJk++ImYgQQQCDAAkyVCXDy6ToCCCCAAAIIIICAPQIU7vbkikgRQAABBBBAAAEEAizAVJkAJ5+uI4BAgAUKCgqWLFkigOuuuy45mX8LAvxWoOsIIGCPAD+s7ckVkSKAAAKREygqKtq0aZPa00bkWqUlBBBAAIEoCjBVJoq4NI0AAggggAACCCCAQKQEKNwjJUk7CCCAAAIIIIAAAghEUYDCPYq4NI0AAggggAACCCCAQKQEKNwjJUk7CCCAAAIIIIAAAghEUYDCPYq4NI0AAggggAACCCCAQKQEWFUmUpLVaKe4uFhH79+/3zsnPz8/JydHT/n68WogGnkoqTQyLTUJKgipzMvLy83NlY5++OhbVGvCZMM5QUilDXmIQIykMgKIZjQRlkqvHPJKIzMCNDqKBKRin5/vv/++devWsb8uV0QAAQQQQAABBAwU+O6771q1amVgYKaFROEeh4xo1eRt27Y1bNgwISFBl9fvmqrj9ZZt1KhRHKLhkpETIJWRs4xzS6QyzgmI3OVJZeQs49wSqYxzAiJ3+bBUagT5wIEDLVq0SExk/vaxlZkqc2yjiB+ht2bZXytVtVO4R5w6Lg2SyriwR+OipDIaqnFpk1TGhT0aFyWV0VCNS5uhqczIyIhLDDZelF9ubMwaMSOAAAIIIIAAAggEToDCPXApp8MIIIAAAggggAACNgok/fa3v7UxbsdiTkpKuuSSS5KTmblkfWJJpfUpLO0AqSyVsP7/pNL6FJZ2gFSWSlj/f1JZ4xRyc2qN6TgRAQQQQAABBBBAAIHYCTBVJnbWXAkBBBBAAAEEEEAAgRoLULjXmI4TEUAAAQQQQAABBBCInQCFe+ysuRICCCCAAAIIIIAAAjUWoHCvMR0nIoAAAggggAACCCAQOwEK99hZl3ulp556qm3btmlpad27d1+9enW5x7DTcAEtzaQvwfUfZ511luEBE16YwAcffDBgwAB9b5+S+Prrr/uv6vv8pk6d2rx58/T09N69e2/atMl/iQ0zBSpK5fDhw/1PqDYuu+wyM+MnKk9g9uzZ559/vr5f/MQTTxw0aNAXX3zhy+Tm5t5+++3HH398gwYNrr766h07dvgvsWGmQCXZ1Hp6oR/MUaNGmdkFo6KicI9nOhYvXjx+/Php06atXbu2Y8eO/fr127lzZzwD4to1FTjnnHOySh8ffvhhTZvhvPgIZGdn6wOo36LDLv/AAw88/vjj8+bN+9e//lW/fn19QlU0hB3DU6MEKkqlglSxXvoZzXr55ZeNCptgwgTef/99VeerVq1atmxZfn5+3759lVnvmDvvvPMvf/nLK6+8omO2bdt21VVXhZ3LU9MEKsmmQh
0xYoT/wdSPXNOCNzEeDSnxiJdAt27d9LPJu3phYaEG/PSLabyC4bo1FtCvXir7anw6J5ojoJ/Rr732mhdPUVFRs2bNHnzwQe/p3r1769atq4LPnGiJpBKB0FTqsJtuumngwIGVHM9Lxgp441kq/hShPoYpKSmq2r1oN2zYoESvXLnS2OAJLEwgNJt66eKLLx47dmzYMTytXIAR97j9NpWXl7dmzRr9/d2LIDExUdv6ARS3gLhwLQQ0iUK/d51yyilDhw7dsmVLLVriVFMEvv766+3bt/uf0IyMDM1n4xNqSnqqH8fy5cs17+LMM8+87bbbfvjhh+o3wBnxEdi3b58ufNxxx+m/+kdTA/D+p1LzEk866SQ+lfFJTI2uGppNr4EXX3zxhBNOOPfccydNmpSTk1OjVoN1El/VGbd87969W6PsTZs29SPQ9saNG/2nbNgioHpuwYIFKgj0977p06dfdNFFn376qWZn2hI/cZYroKpd+8M+od7Oco9np8kCmiejORUnn3zyl19+OXny5P79+6va03c3mhwzsUlAf/gaN25cz549VdjpqT6AqampmZmZPo4+oXwqfQ3DN8KyqWiHDBnSpk0bDXv95z//+c1vfqObGf70pz8Z3ou4h0fhHvcUEID1AioCvD506NBBRbx+DC1ZsuSWW26xvmN0AAFXBK6//nqvK+3bt9fn9NRTT9UA/KWXXupK/5zth2aTahyEG4fcSHDZbI4cOdL/YGoZAH0k9au1Pp5u9DdKvWCqTJRgj92s/jak8Z7QO+K1rTm1xz6TIwwW0FDQGWecsXnzZoNjJLQqCXgfRj6hVcKy6iBNadOPXz6k5idtzJgxb7755t///vdWrVp50epTqVmmmunuB8+/mz6F4RtlsxkWsIa9tIcPZhhL2acU7mVNYrRHf+/r0qXLe++9511Pf0LSdo8ePWJ0eS4THYGDBw9qwEAjB9FpnlZjJ6BpFaoS/E/o/v37tbYMn9DYJSBqV/r+++81x50PadSAI9Cwbs5Tnac7xf/2t7/pk+i3qH80dXOq/6nUzArdU8Sn0vcxc6OibIZFu27dOu3hgxnGUvZpkpagLruXPbERaNSo0ZQpU1q3bq3VKrShd+38+fO1Nm1srs5VIiUwYcIEZVCtff7551qGVnfNawFBrR4YqfZpJ9oC+nVLudNM2WeeeUajPlq1XaN6+uOJ7kKZNWtWu3bt9PSOO+7QjVNPPPFEcjIzDKOdkJq3X24q9bfNu+++Wz9vCwoKdHejprHpx+zDDz9MKmsOHeUzNadC9yy++uqrmv2snOqhJKpk13eeaAnIJ598slOnTnv27PnVr36lf0C1rleUw6H5WglUlE0NcmkRXn0Y9QNW38Cgfz01k23ixIm1ulgQTq580RlejbaA6gDdFK/Rdy0NqTVro3052o+GwODBgzVIoCS2bNlS2/pLXzSuQpvRE9Df4sN+2mv1QF1OfwfTb9S6+02/mGnypYb3ohcDLUdEoNxU6jcuLQTepEkTVX66BUXrRuuXtIhcjkaiJBD2edTT559/3rvWoUOHRo8e3bhx43r16l155ZVaEiBKMdBspAQqyqb+WvKTn/xE6wXpB+xpp5121113ac2ZSF3U4XYS1LeypuxBAAEEEEAAAQQQQAABowSY425UOggGAQQQQAABBBBAAIHyBSjcy3dhLwIIIIAAAggggAACRglQuBuVDoJBAAEEEEAAAQQQQKB8AQr38l3YiwACCCCAAAIIIICAUQIU7kalg2AQQAABBBBAAAEEEChfgMK9fBf2IoAAAggggAACCCBglACFu1HpIBgEEEAAAQQQQAABBMoXoHAv34W9CCCAAAIIIIAAAggYJUDhblQ6CAYBBBCoocCuXbtuu+02fROzvoawWbNm/fr1++c//6m2EhISXn/99Ro2ymkIIIAAAiYJJJsUDLEggAACCNRQ4Oqrr87Ly3vhhRdOOeWUHTt2vPfeez/88EMN2+I0BBBAAAEjBRhxNzItBIUAAghUR2Dv3r3/+Mc/fve73/30pz9t06ZNt27dJk2a9Itf/KJt27Zq5sorr9S4u7etp2+88cZ5552XlpamEn/69OkFBQXepXTM008/3b9///T0dL306quvevv1+8CYMWOaN2+uU9T47Nmzvf38FwEEEEAgxgIU7jEG53IIIIBA5AUalDw0Jebw4cOhrX/00Ud6+vzzz2dlZXnbqu+HDRs2duzYzz///JlnnlmwYMHMmTP9U6ZMmaKR+08++WTo0KHXX3/9hg0b9NLjjz/+5z//ecmSJV988cWLL77o/wLgn8UGAggggEBsBBKKi4tjcyWuggACCCAQPYE//vGPI0aMOHTokEbTL774YpXdHTp00OU0jv7aa68NGjTIu3Tv3r0vvfRSjcd7TxcuXPjrX/9627Zt3pGjRo3SoLv30gUXXKCm5s6de8cdd3z22Wd//etf1ZT3Ev9FAAEEEIiLACPucWHnoggggECEBTRSrvpbQ+OXXXbZ8uXLVXNrNL3sNTSafu+993oj9Pqvan0Nxufk5HhH9ujRwz9F296I+/Dhw9etW3fmmWeqgl+6dKl/ABsIIIAAAjEWoHCPMTiXQwABBKIloDnoffr00XSXFStWqNqeNm1a2SsdPHhQ89pViHuP9evXb9q0SSeWPdLfo98Bvv766xkzZmg4/7rrrrvmmmv8l9hAAAEEEIilAIV7LLW5FgIIIBAjgXbt2mVnZ+tiKSkphYWF/lVVhWuq+mk/fiQmHv23YNWqVf6R2j777LO9p40aNRo8ePCzzz67ePFizcnZs2ePfxgbCCCAAAIxE2A5yJhRcyEEEEAgWgJa+fHaa6/95S9/qXntDRs2/Pjjjx944IGBAwfqerqXVEtD9uzZU+u7N27ceOrUqT//+c+13LsGzlWva+bMp59+et9993mRvfLKK127du3Vq5duQl29evX8+fO1f86cOVpSpnPnzjpeB2iR+MzMzGj1hHYRQAABBCoWoHCv2IZXEEAAAUsENFu9e/fujzzyyJdffpmfn9+6dWtNXp88ebLCf/jhh8ePH6/B8pYtW37zzTf6YqY333xT09y1dqQG488666xbb73V76Vm0SxatGj06NGq1F9++WUN2+sl/SagXwM0oyYpKen8889/6623/BF6/0Q2EEAAAQRiIMCqMjFA5hIIIICABQJh689YEDEhIoAAAgETYI57wBJOdxFAAAEEEEAAAQTsFKBwtzNvRI0AAggggAACCCAQMAGmygQs4XQXAQQQQAABBBBAwE4BRtztzBtRI4AAAggggAACCARMgMI9YAmnuwgggAACCCCAAAJ2ClC425k3okYAAQQQQAABBBAImACFe8ASTncRQAABBBBAAAEE7BSgcLczb0SNAAIIIIAAAgggEDABCveAJZzuIoAAAggggAACCNgpQOFuZ96IGgEEEEAAAQQQQCBgAhTuAUs43UUAAQQQQAABBBCwU4DC3c68ETUCCCCAAAIIIIBAwAQo3AOWcLqLAAIIIIAAAgggYKcAhbudeSNqBBBAAAEEEEAAgYAJU
LgHLOF0FwEEEEAAAQQQQMBOAQp3O/NG1AgggAACCCCAAAIBE6BwD1jC6S4CCCCAAAIIIICAnQIU7nbmjagRQAABBBBAAAEEAiZA4R6whNNdBBBAAAEEEEAAATsFKNztzBtRI4AAAggggAACCARMgMI9YAmnuwgggAACCCCAAAJ2ClC425k3okYAAQQQQAABBBAImACFe8ASTncRQAABBBBAAAEE7BSgcLczb0SNAAIIIIAAAgggEDABCveAJZzuIoAAAggggAACCNgp8P8BhKg2I1BqsPcAAAAASUVORK5CYII=)\n" - ], - "metadata": { - "id": "xHF95Kr4CzGq" - } - }, - { - "cell_type": "markdown", - "source": [ - "\n", - "# Installation\n", - "\n", - "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq` from the extra packages.\n", - "\n", - " ```bash\n", - " pip install adalflow[openai,groq]\n", - " ```\n", - "2. Setup `openai` and `groq` API key in the environment variables\n", - "\n", - "You can choose to use different client. You can import the model client you prefer. We support `Anthropic`, `Cohere`, `Google`, `GROQ`, `OpenAI`, `Transformer` and more in development. We will use OpenAI here as an example.Please refer to our [full installation guide](https://adalflow.sylph.ai/get_started/installation.html)" - ], - "metadata": { - "id": "Kof5M6DRaKhh" - } - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": { - "id": "tAp3eDjOCma1" - }, - "outputs": [], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "!pip install -U adalflow[openai] # also install the package for the model client you'll use\n", - "!pip install datasets\n", - "clear_output()" - ] - }, - { - "cell_type": "markdown", - "source": [ - "## Set Environment Variables\n", - "\n", - "Run the following code and pass your api key.\n", - "\n", - "Note: for normal `.py` projects, follow our [official installation guide](https://lightrag.sylph.ai/get_started/installation.html).\n", - "\n", - "*Go to [OpenAI](https://platform.openai.com/docs/introduction) to get API keys if you don't already have.*" - ], - "metadata": { - "id": "KapUyHMM07pJ" - } - }, - { - "cell_type": "code", - "source": [ - "import os\n", - "\n", - "from getpass import getpass\n", - "\n", - "# Prompt user to enter their API keys securely\n", - "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "\n", - "\n", - "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "\n", - "print(\"API keys have been set.\")" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "ONfzF9Puzdd_", - "outputId": "e5c3cfc5-69cb-448a-c248-a8cebda5ba71" - }, - "execution_count": 43, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Please enter your OpenAI API key: ··········\n", - "API keys have been set.\n" - ] - } - ] - }, - { - "cell_type": "code", - "source": [ - "from dataclasses import dataclass, field\n", - "from typing import List, Dict, Union, Optional, Tuple, Any, Callable\n", - "from datasets import load_dataset\n", - "from adalflow.components.model_client import OpenAIClient\n", - "import adalflow as adal\n", - "from adalflow.core.component import Component\n", - "from adalflow.datasets.types import TrecData\n", - "from adalflow.eval.answer_match_acc import AnswerMatchAcc\n", - "\n", - "\n", - "_COARSE_LABELS = [\n", - " \"ABBR\",\n", - " \"DESC\",\n", - " \"ENTY\",\n", - " \"HUM\",\n", - " \"LOC\",\n", - " \"NUM\"\n", - "]\n", - "\n", - "_COARSE_LABELS_DESC = [\n", - " \"Abbreviation: Questions about abbreviations and their meanings\",\n", - " \"Description: Questions seeking descriptions of people, things, or concepts\",\n", - " \"Entity: Questions about entities (e.g., animals, colors, 
inventions)\",\n", - " \"Human: Questions about people or organizations\",\n", - " \"Location: Questions about places, cities, countries\",\n", - " \"Numeric: Questions seeking numeric answers (e.g., dates, amounts, distances)\"\n", - "]\n", - "\n", - "\n", - "template = r\"\"\"\n", - " {{system_prompt}}\n", - " {% if output_format_str is not none %}\n", - " {{output_format_str}}\n", - " {% endif %}\n", - " {% if few_shot_demos is not none %}\n", - " Here are some examples:\n", - " {{few_shot_demos}}\n", - " {% endif %}\n", - " \n", - " \n", - " {{input_str}}\n", - " \n", - " \"\"\"\n", - "\n", - "task_desc_template = r\"\"\"You are a classifier. Given a question, you need to classify it into one of the following classes:\n", - " Format: class_index. class_name, class_description\n", - " {% if classes %}\n", - " {% for class in classes %}\n", - " {{loop.index-1}}. {{class.label}}, {{class.desc}}\n", - " {% endfor %}\n", - " {% endif %}\n", - " - Do not try to answer the question:\n", - " \"\"\"\n", - "\n", - "@dataclass\n", - "class TRECExtendedData(TrecData):\n", - " rationale: str = field(\n", - " metadata={\n", - " \"desc\": \"Your step-by-step reasoning to classify the question to class_name\"\n", - " },\n", - " default=None,\n", - " )\n", - " __input_fields__ = [\"question\"]\n", - " __output_fields__ = [\"rationale\", \"class_name\"] # it is important to have the rationale before the class_name" - ], - "metadata": { - "id": "ZZIEtZYHNVjo" - }, - "execution_count": 49, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "class TRECClassifierStructuredOutput(adal.Component):\n", - "\n", - " def __init__(self, model_client: adal.ModelClient, model_kwargs: Dict):\n", - " super().__init__()\n", - "\n", - " label_desc = [\n", - " {\"label\": label, \"desc\": desc}\n", - " for label, desc in zip(_COARSE_LABELS, _COARSE_LABELS_DESC)\n", - " ]\n", - "\n", - " task_desc_str = adal.Prompt(\n", - " template=task_desc_template, prompt_kwargs={\"classes\": label_desc}\n", - " )()\n", - "\n", - " self.data_class = TRECExtendedData\n", - " self.data_class.set_task_desc(task_desc_str)\n", - "\n", - " self.parser = adal.DataClassParser(\n", - " data_class=self.data_class, return_data_class=True, format_type=\"yaml\"\n", - " )\n", - "\n", - " prompt_kwargs = {\n", - " \"system_prompt\": adal.Parameter(\n", - " data=self.parser.get_task_desc_str(),\n", - " role_desc=\"Task description\",\n", - " requires_opt=True,\n", - " param_type=adal.ParameterType.PROMPT,\n", - " ),\n", - " \"output_format_str\": adal.Parameter(\n", - " data=self.parser.get_output_format_str(),\n", - " role_desc=\"Output format requirements\",\n", - " requires_opt=False,\n", - " param_type=adal.ParameterType.PROMPT,\n", - " ),\n", - " \"few_shot_demos\": adal.Parameter(\n", - " data=None,\n", - " requires_opt=True,\n", - " role_desc=\"Few shot examples to help the model\",\n", - " param_type=adal.ParameterType.DEMOS,\n", - " ),\n", - " }\n", - "\n", - " self.llm = adal.Generator(\n", - " model_client=model_client,\n", - " model_kwargs=model_kwargs,\n", - " prompt_kwargs=prompt_kwargs,\n", - " template=template,\n", - " output_processors=self.parser,\n", - " use_cache=True,\n", - " )\n", - "\n", - " def _prepare_input(self, question: str):\n", - " input_data = self.data_class(question=question)\n", - " input_str = self.parser.get_input_str(input_data)\n", - " prompt_kwargs = {\n", - " \"input_str\": adal.Parameter(\n", - " data=input_str, requires_opt=False, role_desc=\"input to the LLM\"\n", - " )\n", - " }\n", - " 
return prompt_kwargs\n", - "\n", - " def call(\n", - " self, question: str, id: Optional[str] = None\n", - " ) -> Union[adal.GeneratorOutput, adal.Parameter]:\n", - " prompt_kwargs = self._prepare_input(question)\n", - " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", - " return output" - ], - "metadata": { - "id": "3Q3H9XC4Ncfi" - }, - "execution_count": 50, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "class TrecClassifierAdal(adal.AdalComponent):\n", - " def __init__(\n", - " self,\n", - " model_client: adal.ModelClient,\n", - " model_kwargs: Dict,\n", - " teacher_model_config: Dict,\n", - " backward_engine_model_config: Dict,\n", - " text_optimizer_model_config: Dict,\n", - " ):\n", - " task = TRECClassifierStructuredOutput(model_client, model_kwargs)\n", - " eval_fn = AnswerMatchAcc(type=\"exact_match\").compute_single_item\n", - " loss_fn = adal.EvalFnToTextLoss(\n", - " eval_fn=eval_fn,\n", - " eval_fn_desc=\"exact_match: 1 if str(y) == str(y_gt) else 0\",\n", - " )\n", - " super().__init__(\n", - " task=task,\n", - " eval_fn=eval_fn,\n", - " loss_fn=loss_fn,\n", - " backward_engine_model_config=backward_engine_model_config,\n", - " text_optimizer_model_config=text_optimizer_model_config,\n", - " teacher_model_config=teacher_model_config,\n", - " )\n", - "\n", - " def prepare_task(self, sample: TRECExtendedData):\n", - " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", - "\n", - " def prepare_eval(\n", - " self, sample: TRECExtendedData, y_pred: adal.GeneratorOutput\n", - " ) -> float:\n", - " y_label = -1\n", - " if y_pred and y_pred.data is not None and y_pred.data.class_name is not None:\n", - " y_label = y_pred.data.class_name\n", - " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.class_name}\n", - "\n", - " def prepare_loss(\n", - " self, sample: TRECExtendedData, y_pred: adal.Parameter, *args, **kwargs\n", - " ) -> Tuple[Callable[..., Any], Dict]:\n", - " full_response = y_pred.full_response\n", - " y_label = -1\n", - " if (\n", - " full_response\n", - " and full_response.data is not None\n", - " and full_response.data.class_name is not None\n", - " ):\n", - " y_label = full_response.data.class_name\n", - "\n", - " y_pred.eval_input = y_label\n", - " y_gt = adal.Parameter(\n", - " name=\"y_gt\",\n", - " data=sample.class_name,\n", - " eval_input=sample.class_name,\n", - " requires_opt=False,\n", - " )\n", - " return self.loss_fn, {\"kwargs\": {\"y\": y_pred, \"y_gt\": y_gt}}" - ], - "metadata": { - "id": "HpkQYsh2NevT" - }, - "execution_count": 51, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "def train(\n", - " model_client: adal.ModelClient,\n", - " model_kwargs: Dict,\n", - " train_batch_size=4,\n", - " raw_shots: int = 0,\n", - " bootstrap_shots: int = 1,\n", - " max_steps=12,\n", - " num_workers=4,\n", - " strategy=\"constrained\",\n", - " optimization_order=\"sequential\",\n", - " debug=False,\n", - "):\n", - " print(\"Starting training process...\")\n", - "\n", - " # Define the model configuration for all components\n", - " gpt_4o_model = {\n", - " \"model\": \"gpt-4-turbo-preview\",\n", - " \"temperature\": 0,\n", - " \"max_tokens\": 1000,\n", - " \"top_p\": 1,\n", - " \"frequency_penalty\": 0,\n", - " \"presence_penalty\": 0\n", - " }\n", - " print(f\"Component model configuration: {gpt_4o_model}\")\n", - "\n", - " try:\n", - " print(\"Initializing ADAL component...\")\n", - " adal_component = TrecClassifierAdal(\n", - " model_client=model_client,\n", - " 
model_kwargs=model_kwargs,\n", - " text_optimizer_model_config=gpt_4o_model,\n", - " backward_engine_model_config=gpt_4o_model,\n", - " teacher_model_config=gpt_4o_model,\n", - " )\n", - " print(\"ADAL component initialized successfully\")\n", - "\n", - " print(\"Initializing trainer...\")\n", - " trainer = adal.Trainer(\n", - " train_batch_size=train_batch_size,\n", - " adaltask=adal_component,\n", - " strategy=strategy,\n", - " max_steps=max_steps,\n", - " num_workers=num_workers,\n", - " raw_shots=raw_shots,\n", - " bootstrap_shots=bootstrap_shots,\n", - " debug=debug,\n", - " weighted_sampling=True,\n", - " optimization_order=optimization_order,\n", - " exclude_input_fields_from_bootstrap_demos=True,\n", - " )\n", - " print(\"Trainer initialized successfully\")\n", - "\n", - " print(\"Loading datasets...\")\n", - " train_dataset, val_dataset, test_dataset = load_datasets()\n", - " print(f\"Datasets loaded - Train size: {len(train_dataset)}, Val size: {len(val_dataset)}, Test size: {len(test_dataset)}\")\n", - "\n", - " print(\"Starting model training...\")\n", - " trainer.fit(\n", - " train_dataset=train_dataset,\n", - " val_dataset=test_dataset,\n", - " debug=debug,\n", - " )\n", - " print(\"Training completed successfully\")\n", - "\n", - " except Exception as e:\n", - " print(f\"Error occurred: {str(e)}\")\n", - " raise" - ], - "metadata": { - "id": "PEj6xiZ5dVaj" - }, - "execution_count": 52, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "from adalflow.components.model_client.openai_client import OpenAIClient\n", - "\n", - "\n", - "gpt_4o_model = {\n", - " \"model_client\": OpenAIClient(),\n", - " \"model_kwargs\": {\n", - " \"model\": \"gpt-4o-mini\",\n", - " \"max_tokens\": 2000,\n", - "\n", - " },\n", - "}\n", - "\n", - "\n", - "train(\n", - " model_client=OpenAIClient(),\n", - " model_kwargs=gpt_4o_model,\n", - " )" - ], - "metadata": { - "id": "GnlZBQOMEj6E", - "collapsed": true - }, - "execution_count": null, - "outputs": [] + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The PyTorch library to auto-optimize any LLM task pipelines\n", + "\n", + "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! 
⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "## 📖 Outline\n", + "\n", + "This is the code for a classification optimization tutorial ![image.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA+gAAAJYCAIAAAB+fFtyAAAAAXNSR0IArs4c6QAAAERlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAD6KADAAQAAAABAAACWAAAAADDsFQWAABAAElEQVR4AeydB5gURRqGe5clLJJUMnqIiCiKYBbBnOMZThEDYMCcc1bMeurpKYZTDHcqYsSM2RMFwawcZhEUliRZ4rJ772xJ0fSEnZ2Znunu+ebhWaqrq6v+eqt75qu//64uqa6udvQRAREQAREQAREQAREQAREINoHSYJsn60RABERABERABERABERABGIEJNx1HoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACIiACIiACIiACIhACAhIuIdgkGSiCIiACIiACIiACIiACEi46xwQAREQAREQAREQAREQgRAQkHAPwSDJRBEQAREQAREQAREQARGQcNc5IAIiIAIiIAIiIAIiIAIhICDhHoJBkokiIAIiIAIiIAIiIAIiIOGuc0AEREAEREAEREAEREAEQkBAwj0EgyQTRUAEREAEREAEREAEREDCXeeACIiACIiACIiACIiACISAgIR7CAZJJoqACIiACIiACIiACIiAhLvOAREQAREQAREQAREQAREIAQEJ9xAMkkwUAREQAREQAREQAREQAQl3nQMiIAIiIAIiIAIiIAIiEAICEu4hGCSZKAIiIAIiIAIiIAIiIAIS7joHREAEREAEREAEREAERCAEBCTcQzBIMlEEREAEREAEREAEREAEJNx1DoiACOSYwC+//FJSUvLII4/kuN5wVrdzzSdN2+F29dVXp1k4V8WyHK/33nsPs/mbK3tsPf7VbJtQok4E/D4/PRfL9OnT//a3v6299tq0e8cdd/h6Pqy33noDBw6sEw0VFoGCEJBwLwh2NZohAb6+U3wykA6LFi1CJ6V54Kuvvkrr7du3r6qqyrADITnM/EA+88wzHntPP/10CHgy87+J0Dz22GM7d+7cqFGjtm3b7rjjjldddVX+zYhvccKECZxOmBe/K1c5v//++wUXXNC1a1f6vtZaa+21114vv/xynSp/4okn0EB1OiQ/he+5554wTvYY8RRfSihR6KEIbZmGDRtuuOGGV1555ZIlS9xgbQF34uSTT3aX4cI85JBDOOcbNGjQunXrAw444LnnnnMXiE//9NNPRx55JIXLy8u7dOly2WWXxZdx53zxxRdHH330uuuui52cYLvvvvvDDz+8YsUKd5m8pc8555zXX3/9kksu+c9//rP33nvnqt3Ro0czanPnzs1VhapHBPJJoCyfjaktEciSAF/ftoZ///vfb775pjtn4403tnvTTCDcBw8eTGHz+5r6qMcffxyvDLLsnXfe4fcsdeFi3tuxY8fFixfXr1/fDwg//vjj1ltvjQo57rjjGI6KiorPPvvs5ptvNuPoR4vp14lwxwzOJQyzR73xxhs2nWXiu+++22233WbOnMm8ZauttkJ5cE6i3s4///y///3vaVaOcB8/fvzZZ59ty2c5XkycGG6kpK0wswTCvWXLlm6vZ65qzsyeNI9CSW+wwQam8MKFC0855ZSDDz6YTJPTpk0bk0AHP/jgg6TnzZv3wgsvXHvttUhqhs/sNX/32GOP/v37u3OQ+HaT2ek111yD+D7ppJMYMqZwuBIOPfRQKkGa22LuBCqcs7FDhw7nnXcefuvJkyf/+uuv7gKeNBYyVcDmY445hoYWLFjw9ttvH3/88Vxll156qaewH5uei4Vv2r/+9a+c3qYtaOTkTEO4c51yprVo0cL2gourtFSuTMtDieASkHAP7tjIsngCuIJs5kcffYRwd+fYXX4k/vjjD35ub7zxRvxP/FLmX7hjwBprrOFH13JeJy5D/ME5r9ZU+I9//AN5hCJBu9gmZsyYYdNBS2SvaE2Pli9fTtjAnDlz3n///W233dZk4pI86qijbr31VnR83759M+t7luOF3PFpuP2rOTNQCY/arOZjds2aNQvhTkb891JZWZnNPPXUU7fffvthw4bdfvvtVtlTA8LUlvG0xe0vVDsnAPMuOyXm3gsOaU4MT2GzyY1B9PdGG230
7rvvMtFNWMadyTcqqr1Xr17MB5o2bWp2McH75JNPmOm5S/qX9lwsXNdube3r+cDMyr9+qWYRyCWBan1EIJwETjvtNK4Eazs3c5F03bp14/uX+8Innnji7Nmz7d6PP/54zz33xOeEwsAbisOSXRMnTvRcS/i07CGeBK59fjbwPOHcbdasGY4fdwE2ORYfFa1zIxuXG45hUwDDiEzYdNNN2YVDkdgGjGGXaZ1pgLse7LE2kGDzf//7X79+/fj16tmzJyW//PLLAQMGdOrUidr4yacjaAV3Db/99huu6Hbt2vETSE/5JV66dCm+PapCJbhLfvjhh2SiA9yZJs0vPbuefvppzy4Pc9xjvXv3bt68OTMKNAd3tE15T9cwmAIYhvOMBBDw/1VWVtrK6QJ6Ba1AVXgcEeW07iFjCwOQftnNhAmUR58+fRo3btykSZN9990X2eEu9vzzz2+yySYA5C+RBpjHHMAWSH0iUXK//fYbNWoUXn9qYCAeffRRcywGY7b7A0Z27VTzMWUYiyuuuGKLLbbgFMI8jMSnaHaZvxxuTwB3Pml0HntRb558/O6cHugzk2/G7sknn2Q4OENoBZc8rlazF1vcFpqOJxyvSZMm0VPGi9iwu+++m8O/+uqrXXbZhQr/8pe/MHe1ZpgWTWfjIdAcjZrCDz30EDW0atWKk5P7Y7jYbSVY4jbMHOKu2ZR86qmnoMdVzLXMjIWTytZQ62lmS9rEkCFD+MbAGK4XxDSTIrsLAzg9uPrwWKN6gcCFb/cmS3AzhF7Ej6CxzX2U8SLj+rWZHMj1ZTc9CcaXwJX58+d78lNsvvbaa9TJtUAZpv3uKy7hUcSiMLtg3BPuNZnu3nHvkVkKFz7DgW1MKjiR7LHLli0jHIV7EVwm7OWLgq8Ls5dvUbzd3AeAPN+WBx54oD0Q7HwoFn8ikRl/PjDZ2GeffTj/OS27d+/ON61pIsX3pPlepSP2Y1rnDGSYzOH85TuTHq255pqMPvNkAtLsLmPG8OHDr7vuOnpBB3fdddcffvjBFlBCBHwlII+7vXiVCDcBbh8TIIuQPfPMM/kiRmp8/vnnaFO8U7htUO3IhYsvvpiveH5vTGAoOffee6/71jausmQUUCpoDn5mjjjiCOp56aWXDjvsMFMYqbf//vtzT5ldZ511FveXuRWAWCQImwLcaMYwfl1OOOEEfjvRfPzY4B9N1pAnn1aYD9xwww18EbCLmn/++We6iSWoin/961/8pUKcpuydOnXqNttsg5Jj3sIv/ZQpU3DUEQ60/vrr88NJF3DQ2vrZRCsjpm1OnRK0S68hhpTkp4uJipkJJKwERAhufv/wDb/11lu33XYbcCBPYfyCKMtx48axic3c1uDnM2ElJpPfV2pA7/JjmbAYUyxqoDmUFn1niNHHnAzIfcqjHoguQK5x84RgA0ius8467npSnEimGD3lF51hpRWUKBJkyy23ROQR18G5989//pOgAhO1FR+7hfAiGoGZ2KBBgzhPhg4dip30nVmZ24aEaU458j2hFOQw22EQmT9gmI3ZuP766zklLrroIk5+1Aw3iJgOIUEIcSZUA73LLJdjmdgkbIvx4oylR7fccgvnCc82oOA5Fq1MEMh9992HGbhmmbd4DucQd/QaKvDyyy9nIm2KMRaAQqghEOkOWpnRN7NBjDzjjDOwxwRhu/3QtglzgTNlYux4bPHOO+/klGNkuahNmRSnma3EJpCVxEtAhhOPMAlsY0ZtvjFMGXQ8Wpb+Hn744VxHwEQagsXWkE2CbyEORxe6KyHqnUmsO4cJHuoWUfjtt98yIbeOcHeZZGkuE3ZxbfJt8+mnn1IPDgUmS8jo+EO4UvgGY/iYlcXvTZgDLiYefOlxBdEdADLJIVoMDU158DJMfOnxjcRpj9uekDZigdjFBci3B8PNJcn5yXcaE0tzedqGzInEHYP48CFbhgP5CmLSxbcu34fffPMN8po0BVJ8TzKg33//PdNgLgGcCBTmh8DWaRKcXdwSgQlXNFNELi5OWs4BANqSN910E64cJmBcUFwmXBpjx461e5UQAR8J+DotUOUi4B8Bt/cXNcxFgsKwzY0cOdLm4GElbfzctoBJJPOQeYrxPY7UeOCBB0w+3+lIJVsG9Ub9Hn82ioQC6Et28e1vC5Mwu5hdsAvHknsXOdZdZzxDiDx3AX5L3JvGC0vshMlETvFb4umpae7++++ncn7bTEn8YfxouT1M7mqNSym1x90oPwC6DzRpT9eMEHe7ijfffHPErin87LPPYph1laG9jCL3kLGtMCMy9/0Ru/xIjxgxAm+i3YsaRsYhi23OtGnTkLY2h6P4pWduYwqYmFrjeCYn9YlEAUpirQWO7EAYcQPB1AYx9kLPbJq/1onIJjM3nO52L9IQhYogszkcbk8Am2kSWE5HPJlmk3OPA1988UU2zdjhCLTeWbzU7EXmmsL40W1/TU7C8TJzRQpgJMCZBuDFN+URkW47TYueXlOS21CMMr5qPKzmQM/Zy6SFKaXZxV80vfG22hx3zZyxTAC4c2VvdpmncnnK05RPfZrZOk2CgUPIMp/nfDM5TPXpFNey2cQSNnmQxmwyakhDFKfZTPY32fcJtjHzYS8f5ldMX+FJX8y1aWqjufgPFzh7mc2yiysuWbsJ89GaHGVuTSA6udXDlxjfXe5G7YG4qCnMBWVzEiYoY89Pz2iOGTOGvZZYjx49ONPiK+F0ohiPZMTvIgfsfOwuSrrvQrjPBy4l5o2cyVRoy9uueWzzfE+aB0I47e2BJKiKYTI55gkQvg3MJt8qtMXUwpwtxgym5fZa5uLC1K+//tqU118R8JWAHsXgctMn9AQQTGgafDP4q8wHxYD3znzDGoccP/PJgkFr7T+SBUHMz7YpiZjmNrT5BSIH6YkIxoHkrse4wNlFwkhwu9fsspupE8S6uAsYzUqOcc5tt912pHFl8ZcfLVQs3muPO980h9eQO9rMbUxthMYCKllArSmT+q+hiqSg3dQlzV53R3bYYQfuG5h8pljcFUFYm004mylZsjqRdziPsRwnH7+XBx10ENqXOZUpj6cNUc4ArTwRZtWrVw9PvzkTUJAcy88zZ4spzzmD9922lfpEMsUoj/0mja+OBV5sX2w9yRIYg15kL9AI5UJ8MFhm+JIdYvNRD8kcriYfpW4LM4Wzhbk/wFyFkAm7N50EvlJTjIGmj+hOTiGTwyaZtfYahzpShksAyWsOtGcvTkoGCIlGJaTTsQeXLWqbOjmNTXl0IbdoXnnlFffhyU4zdxnSeKOZCaDPON/MLs5A3Nvu2vgCsRcIo4bnuNYue1pxbzK95Gzhw10R3LTcAePa8XwV4A7gBHZ/uMtHJWZk7YC6q02R5lEQ9nKD4rHHHuO7i5kzT8TiI8ezHn9UBk3Y0eR7lZtX9Iuzwp7MpHGrc6/A0xZHAZPlcez3p6dAmpvcaUF5M4I0ZA+xPK1t8d+TtnCKBBcLw82dOlOGM4F7mHzhcD/BHsXNOnMtk2O+ELI5PWy1SohArQQk3GtFpAIhIMDPAz//OOTMT6P5y+8Wv/R
Yjz7gd4vb4shrfhpx5eIpqVOv+OXje5wfJ7xlfHAY86tv3KvUQzQkUgZvVnyd7MLjmPDedHzhhDmeaATUHl4xpCq/THTT7DXSB2cev7648RLWw88bmp6IdrMXBY9TNlm0ScIaPJk8Con4QN5hDLfLceumUPCILay1NRAhYH+2iaZAVprb66aAjfew5T0JwmqJx0D5EXWNYxjy/KyawAAjFOiX+0zArW7OBNqiKkKP3BUydnYz9YlkinliCdx9sfWkSHDbnfgigOAKxUiUYprKFd2Gdk9Ys8l3Czt3H1EzIDWxGQkPj8/0jBfzHMIhrCqiPDl2BOMPJ4c7PFxod911l5lbmjIEohCawhyAs5G+m4VK0uy+GTv3YFEnwt3km/o9ZqcYmvjaEGG4/921ebqcojbTeuq/2GYUOVhw1nJCWnFpD6RF+Lg/XFzsZUbB32Sjbw7nzpL9cFOCTFM/k1hTgL9m8Rm0u82xiXSasIVNgla43WEWjuSrlQFlzmxHk3kCm1yqxBfxEC2XqjmKO1TEsOH4oGsmFguzPTWns2me20n2dZfiezKdyjkNPGeaCXtznx7u7wHODapNfUWk067KiEA6BBJIjXQOUxkRCBQBJCOq3bqTrW38lpBGcHCnmEBwwmrxNBOZQIw1m/hRbMkUCcQcwScUcIshNmkOvZjiwNS73DLIlOQ+bPwhnl93vJ787vJDSOAE9tNxwnBTKGZ3hXhhmWxwOD+lhFXgvLTuRncx0ogM/pqff/cubkCbXWRiGBEjeLKRnnjNeVQLuYxExqnsPsSkE2bGF6tTDnXSET4EW+OYZDhQPAYFst56eU2dCadV8c2lPpFM+fi+cFc0vqqEOcwAiYnnLgEjyBlLVcQBGwmSsLw7E+nA7QKigd2KwRQwqsh968B9YAbp+D7G56ToNVH7TC+Z1LkvELrJWpZIbQJ7UHsIZfyahH+kefam04t4I9M5KlmZ+NpSdDlZJTaf2jg/zSYxQnDgaQouQ1sgRYLC7OX2RYoyzH7tXuYGnGa4DMgx0t/s4pQjkVBfMrXjGkndhK3fJLjHSEP4vLkAmcjxhcYE3o4mopwR564C3wk818FA82iEuY3DITgRuD3ItzEBPFwChBTiDfHUn81mNt+Tabab29MjzUZVTAQgIOGu0yAKBHjSEYcrDmCPzHX3Dc8fHx7aw+vMg0REv/ArEq+e3YeYNIqQWA60oPub+oMPPuAxRKOiaJ3HkrhfTDHP4ezixwn3T7zT3ThpcErZQ9zuHJvpTvCLy21ubh3g6DL5xsFs0sxScJulWLgNiU8ZukPoCBKcB7/clbvThHuyyRN77kyTY3aZfHQ/UowPUgzPN48VouOtOvEcm2yTCjkKe6zTnXsayQonzDehQYTBsBfg/EWgJDTDGO+GRmF3N9M5kRLaYDJrPZ2YQOLW5dloW9ITRpWicp7DI1SXGGIe93QX4zYL8ght575T4e4jchOk9sFr27S7khymufNDcA4TS9ZscVfLtJk7XUhVO/Fg3N0FUhtmz0n3bSLGzn1CumtLnba1MRymJPfQCL1IeNqkriqDvYhsHhPnQsZ9wJdSrTXgt8YBzCgTG5bM3YA739ZDOBlpwgUJIeMJdZvPw+ukjTvDZpoEVx9gEdAs9M60yrM34SYnM1FnOEHMXoJS3N9mZPKlRzwJH25+ouN5XNUId3ZxofFkCB9OVE4VKmFOm7CVZJnmSufrLn7IUn9PUmHqM40CnB7urwVyzHMdmZ1sybqgfBHIjIBCZTLjpqOCRQD/Cu5qIjjdZhFAbH5I+B53u8r4naCYiZYxYtHze+OuhDRKlxBGIkOQI/aDx5Rd5pkn4nAI2zAPt9ljTYvsIsEvtM0nYXYhsrm/jNPa7mLBB5tOmDAzB3dfeKbTlkRG48pFHhENbDNJ2PJ41LhvTkwLq3PgqLZKzl3YpBEWUOKn1E2GhSnQGXZVDWYj7gPdVN35tabxPjLnsUHqeOw8gs9TA0+MeZ5VMNHb5tY2tQGWWYSnDGqSeky/CFaxN/SRO+641dQnkseS+E2CQMh0Q/OU8Ywg8z3zSJ+nWMJNzj186qxl4R5fcLEoCme4ZwKAvreRFQgsZjV24DDSdj9hQ9lkchnidkUEE9puI4BNhZ6+YwP+WndbGJYCHdMz5mN4bW2cG+EWPGxNpLu7kjTTqD3MY+5trw5W+MGkzGpLs1F3MdzVfPkwmu7MFGm+QwjVQ/jyteYuhj/bPKRLj+zHeN8JCyQuBcjWC47nm2PN0i7uSkzaPHXKfN4Ex9sCXPhcMnbTJhhQS49MwqLc9wyx1pZkssGs0gwcU3Qkvt2F/ibEy46pza81waqgBAryBeg+Z4w9njONqtzfk2zWep2yhix3jey1yfMJrN/Fw6k5vKlVawdVQASSEZDHPRkZ5YeJAFHs3HfmliuxBKwUgecbRw5hITiokDv86qCJWcmLHwnUDBoRbcdXMz3EQ893MWEe+LTwDxEx6QmaRFrhrWQ5PA8OAsT55UDTs0gcISjopHPPPZfveiQ+3/K4/wlE4YeTEA5+CNEH2GNiWtCdZJoK+Rnml5u/iBIUPIuUeVrxbGK2iQpFlWIAv9lmPRBbDMFKJjQIUSCyArkGBG4O2Oe3MBVj8HQSZmqPSpjAiY4IRo6be+4oJH66EASsDm7KE8OKzQgdvFAE7EKYCF37OFfCOhNmMtng+QF8b3DGbYxH1kwJknnFsBwlwZpuZuLBw3DAZ+y4/079IGJZOpgzOihInIvcFSGYh7sxZmbFSYLN2EnEFA2hNnBPWqWS+kRKaL87E1yIBixEAqKZcGGa4ARbBq857nZORWxg7JChnH62dVssYQKhiQTn/gbG48XknEGycPsIAtCjs+6jAGKKsSASqgXZZB//xRHLCc/pymOLKCqCFtwHZpmmR3hteULU7U0nWgOxyIVJF2iOS5UucxkCx9wnMY1iGGPH2thYyy63Z50CXNSApeOMEfNPsxwkWsq9wmn6xnNicCajhrkqWX0F9yonMEDs06jpV5VZSZ5woC80ypVl4qeph28Aj+PZoGMXjgPiWLhhyEOZdJ+LDmVMiBq34OyDKx5LiBbjJhh35+gjVxnrxsCcY+mmp6TZZMEZ5sx8cXEZcgURGci3JU+RckkyKPGHcDJzH5IgGc5hNC5fenTKFiOT1SEZU05FppqcuuZLjz5yDjNDpgCuBJb8Yig9Z6+tJEUCPwVnC6cTFx0k+WrCKc7jsNzhrPV7EquoGTi0y3lFJUbK2+ZY8BenDHNdFgTDfn5BuFqZi9KoLaOECBSMADNUfUQgjATcy0Ea+1GWfCOjxXHh4FG+8MILuTXMLpQNP1fcoEdLIQj4veGHxHaZmG+OQlJwERqfk91FwqwVQ7CmO9OkufPLIfwcsokbiZ8BPED8DPB7yWzBHoKHjNXH+C2kCeQCPwboTlMDR7EcOL98GMwvmXmA0tpgfKi4it1NswI3sg8hzlEs8W7ufdtDKEm8DeqchugsYQBQwpvlrgGdys+P+8017r3uNP51WBHSw+8r8wQmGO6jUAzMTIijpV/8hT
A/yeZwM53A1Wc2uZ/O76K7ZtM1m0MfeWwOCHSKeYJZD94uPmiLmQR76RTzKwpDm2HlEEvblEE1MuugABH5zNYo4B5xfoCRSvBBOsS/gIkakp1I7EIwoblNK+YvOpKPzUEbgd34/DCD/Jr9fxbA98nkikponaBefKXAYZNi5pPwJFy5M/Y/JwmaG2lLDZwGOFnRVe4CRjEjOxCmnO1cDhjMWWHLIJqhbeZypulax4sucNrYGkhwoOVgWjSdNSNLL9wfywdTzYO5CG5UuFlHldZNzTykSJ2cBhxrDnHXbMow5YAbfUdOEfDmPiFrPc1MDe6/zOW4MDmL0MfmxoXdG99lz0jZku6EubHjvh7N3njbyOek5TxhlynjJmbTFp0pYy46hpVLkmscxUn8jNmV8C/nG1NTvBL0kQAYgqy4GZKwpM3k24nTgyuaQ7j2EdnIVlzppgCG2d5xnwfFzG1Dpn9cbuhmzgrbHbQ+E3JOM85AIDPlME1zc5Lrlxy+E7hCCdvjHqBtnf66u0xzFLZ7488HvBLMCTlnqI1Ti86awpwYqb8nuT3Ld5oR4uYMdBtPJYwOX+PYz3cIHeFS9ZiBT8TmeK4gm6+ECPhBoIRK7XeEEiIgApEngO5B9KAAAttTnlrjR5efZNzkeTASWY9bsU6LruTBqoyboC/c0kFVIDsyrkQHioAIiIAIBJOA7vsEc1xklQj4QgDHM9FEuOR9qT3TSt3L1+Dbw23GzW5iXTKtT8eJgAiIgAiIQDQJKMY9muOqXomAhwDLL3ATnNUbCAYlXtazt7CbxCOh3VlUjqgeYlcIXiKehDvshbVKrYuACIiACIhA0AhIuAdtRGSPCPhCgIfDeJyUpVeIfrZrsfvSUt0r5TFEZhREkbLcBNHbeNzjnwaue606QgREQAREQASiRkAx7lEbUfVHBERABERABERABEQgkgQU4x7JYVWnREAEREAEREAEREAEokZAwj1qI6r+iIAIiIAIiIAIiIAIRJKAYtwLMKysrcvy2yw9m+wVMwWwSU2KgAiIgAiIgAiIQN4JsC45b/vi7QF6xVU67CXc06GU4zKodt6FkeNKVZ0IiIAIiIAIiIAIhJPAr7/+yuu3w2l7Xq2WcM8rbtOYeTUg5yiLVZPD6+t5TT3vA+dNdQWwRk3mjoCGMncsC1xTMQwlS+Z//vnngOadXOZVrwWG7k/zxTCU/pALXK0aysANSaYGeYZy/vz5eDONNMq0yiI6TsK9AINtImRQ7Va4N27cmLSEewEGI6dN8mWkocwp0YJVVgxDySvoR40aBeI+ffo0aNCgYKx9brgYhtJnhEGpXkMZlJHI2o6EQ6ng4TS56uHUNEGpmAiIgAiIgAiIgAiIgAgUkoCEeyHpq20REAEREAEREAEREAERSJOAhHuaoFRMBERABERABERABERABApJQDHuhaSfsG2eGCP8K+EuZQacAANXVla2ZMkSBjHgphaPeURva4mx4hlu9VQEREAEok1Awj1A48tSptOmTZs7d26AbJIpdSHACLZt25b1gvSQTV2w+VsW1d6pU6cIP3zpLz7VLgIiIAIiECQCEu4BGg2j2lu3bs3KJFJ+ARqYtE3h1VoLFy5s0qSJXLxpM/O3oHnZWUVFxV/+8hddU/6yVu0iIAIiIAL+E5Bw959xei0QXIGvHdW+9tprp3eESgWOADKRJfYaNWok4R6csWnVqhWvPKusrNRyq55BIayrX79+ZJLw7NKmCIiACIhAMAno+zoo44KwwBR87UExSHaIQCQImCAZJsYS7p7xZHq54YYbejK1KQIiIAIiEGQCWlUmKKNDeDSm6G5+UMZDdkSFgK6pqIyk+iECIiACIuDI466TQAREQASKkQB3Ib7++mt63r1793r16hUjAvVZBERABMJGQB73sI2Y7HUReOSRR1q0aOHKSCuJC3bEiBFpFU2v0I477vjEE0+kVzbQpTLjmU6XJkyYsM466/zxxx/pFFaZ/BBAuL9Q8yGRnxbVigiIgAiIQJYEJNyzBFjgw1dUVY/56fcXvpjCX9I5sWbnnXc+++yzs6kq+xqStb7eeuvdcccddm/fvn2///57u5lmgjVG9tlnnzQL11rsxRdfnD59+hFHHPHee+/htlxzzTX5y9zA/WFXrfXYAumoZx6B/fvf/77FFlusscYazZs379Gjx+WXX84jmLaS3CZ++eWX448/nkUVy8vLO3fufNVVV2FAwiZYw/60007jAWuW1jn00EMhY4p169Ztu+22u/322xMepUwREAEREAEREIF0CChUJh1KAS0zcnzF4JcmVMxbYuxr17zRVQd023vTdgE11wez0JF86loxS63X9ZD48jyTgJ+S5Tj++c9/HnvssTznt/3220+ZMmXBggVNmzY955xz5s+f//DDD5sD11prrfgaMs5ZunTpnnvu+dVXXw0ePLh3796smjJx4sRhw4bdddddN954o6daFHb2S5h/++23LJhz//33b7DBBuPHjx80aBC+81tvvdXTFpt0/JVXXnn66aeZTpx++umHHHLIhx9+aIpBiQMvueQSrWESz005IiACIiACIpAOAXnc06EUxDKo9lMe+8yqdkycNm8JOeRnY+7AgQP/+9//3nnnncZhjKuV2tBquKjxobZp0+aYY46ZNWsWmXiRUYSjRo0yzd1yyy2sZYmHNWENpoznLw1ts802DRs2bNeu3cUXX2zW1aEMDns0Hx/EX8uWLa+44grz5C75kyZNQhoa2yjpdk5fffXVPXv2fOihh1ixG1NPPfVUhDVWIdMx7Prrr7etc7gJleEQt1+cNBVSDJGKAjYOZpzZzzzzjDmWLlPmtdde23LLLTH7gw8+mDlz5jvvvHPAAQdQABq0BSL+Mp2gAAk++OAvvfTSDh064B3fdtttjfcdz/Qmm2xy4oknmpp/+ukn5D7GsxeBO2/ePGMYFpoC7r//+Mc/aJp2zzzzTCyhvzvttNN99913ww03mGIGILdNoLfXXnuRiaubOGYMWHfddSHDYvO2QrpMDSxndPDBB//+++82353Ye++9mYQwW1h//fUPPPDA888//7nnnnMXMGnMHjp0KG3tuuuuGMYho0eP/uijj8zePfbYY/bs2Qx6/IHKEYFQEqha4Uwc5Xz9TOwv6Rx+/KsZI0NaedWKkkkfdJg9hr85ph1mJn6dgX4zyeHFUnxVyeMe0DFHqi5envSXgKiYq178nycyhs0Sx7n6xQm9N2hZr5Rk4k95/VggR+J9joNkJ/hk0003veaaayiDN5fV5dFhJ5xwAnpx8eLFF1100eGHH45qRB0iDdHxX3755c8//4y8xs+KbI2vIWFbOKf33XdfVP6///1vHLr4Yln+3OrURx99lNiMcePGffLJJ6hblCUFEIvIaDZJJ6wT+YuqHjlyJIm//e1vWMVqdyhF5ONxxx23++67o5vdByJATz75ZJPz+OOPX3nllVtttRWbqPbHHnsMKdylS5f333//6KOPhgPi2JRkjoGzGQmLIqdyJO/GG2/srtaTZgZChPeTTz7Zvn37559/HhHME4HUT
IvYs99+++2///40ga7FSBzkxAJhyXfffUc9zEA8tbGJc53Cm2++uWeXe1gBeMopp1hvNzcEuDPAVAQmCPcLL7zwnnvu4fCxY8fCmf4edNBBcCMGxlNnwk0EesJ7CJ9++uny5cvhbI7aaKONGLgxY8YQJEMOExtmVsz0dtttt4TVKlMEwkRgwovOyIuc+Svj05q1d/a+2el2YA664F/NGBfSymvMLps/NfYFPeleJ4e0Q87ElzPQbybUr08WBCTcs4Dn56Go9m5Xvl7XFtDu0+Yv6X71GykOnHDNXo0bJB13nNwILMQormJTyd13341GtN5cvMJ4bRH3aOLrrrvuzTffREnjkh8wYAC+WA6JryGhMQhH6qFy5CYKj/hspgQIVvPqInYxT2BX165dkbmkEeuIRcLH8Uxb2zw14ynHPAoQUb3LLrugfV999VUqpJKbb7753Xff9Qh3ZLFRxniFiRFH7DJjIRCFzr711lu9evWifgQ67m2iRKxwZ0qDbjZNcweAuYqx2WOM2Zw8eTKOZ/6i2slhqoA+JocmULEAZEZEfDz1vPzyyxQAPgDpeLI+Ugb4zJpImA+eckaB9GabbcYUxWQyMeBuw58lHMc+tMBDAjTKdMUId2ZZTCTQ8ZRkQDkc8+xRCRM//vgjMTkJ42R49S/2ux8XBg6Zth4g0FO7qYQIhJUAOvKp/o7DN+7Kz/yKWM7h/85Wu/tXM5aGtPKQmu0r8PAyWXnF6P+MCSQVcBnXqAMjRgCHOpLXCFzbNVza6DxUGm5j9GLHjh3R1nZvOolvvvkGZWydxMRqE7/x22+/4aPlcHy0dhfFbrvtNuJeal2xDlWKajetIxkpbyU1mzNmzEhoGKoafzOSmjsJFECYLlq0yEpzcvCCu93bxitvquIWBDcKElZrMpl1YDmsbBkmBvbluOeddx5BO8xeuFFgM21Jk0Di8zFpPPeGj7sMEpyIcxzq3Byw+USq2DQJ5iG41bmzQeQ9IUkE6tBHpmeMArrflgR1auHOfRKE/mGHHZbspoetKmGCCCLaTbhLmSIQGgKEmuBrd6v2mOmI+JJY/vo7O6WZrq1Jza8xi3bNB3JVM/WEtPKQmu0r8IIxudjZaL/MT+/YyaxPDghIuOcAoh9VENCCazxZzeMmzh748MfJ9j5y7NbbdEr6NCQ1JzswYT56mhhuPNbuvUSlm03j4iV2mQ8h1O4y+U+7X42J7vds4o+PNwnJy40CBKsJDaKAif/mCUui0m15AtZt2t1NgsjnzJljd8UnqI35AzEk7lmHnQUxl8B9zq4ffvgBQRx/ODl4x82MgrRx2+NNN4E0prwZC0/sittIHlQgGofIGQL9KcYNBMJjmI0g3BO2mCyTuyLcx+AZ3H/9618Jy3CXgGqJrbJOd555cN864CRhUZqExyoz/wR4SpiIMtrV48J1gz9p9Kr4hNWOrI7l37Tuanm52fCvZuwLaeUhNdtX4H4zmeJw8nfaITcntWrJlICEe6bkfD4O3ZkioGWHLq1YQ4anUT2eGULX2zZvxN4UMe61Go4fHSexLcaag88++yzO7Phfd/zuPCr6wAMPDB8+nFAZ3LrGw+2pwVblThAXTrWE8hvPOtHYOMtZ6tuUIfbaFiaOBalqhG86NdsDa03QOsHlCPr//Oc/1sFPmA0yHTe8jY1JUQ+eeEJB0O7EuycsRgFgItB32CHBlx1B7TwziozGgU1ouImV9/QRqc3HXXm/fv0I7Pn888/d9wHcBTxppg30kbsWZnSeeuopW4AWPajtLk8CXzuq3Txyau9jeMqwl8nS22+/zUKQ7GJ2AUYmRbYYIVVGKdocJQpIgHHkCekCGhDWphdOD6vlslsEsiSgkz9LgLk4XMI9FxTzXge6nJUfWUMGpW61O2k+5Gej2qkBjY6Yw02LYxjJyLLcSHPEIpHQbBJJwnOWDz74ICVRvSxawiooeIsRoEjDCy64IL6GhDqPRyR5CvOMM87g2U0UHo9FnnvuubYkgo/Nk0466bPPPiOimpqp1tRMQAhB4WhrvN0mM+O/PAvLZOONN97AL86HeogvZ/5A2AwTEsRunz59eBCTSUWzZs2YmcQ3hHTGDArg0o7fSw5BMkcddVT//v3pAoVZhQZdS3ARz6QOGTKEBzdZ1ZGAfhz8FGOKgmqHP8ZQjCdxcYrH+8XNkos84gk05gPMGXDbE2zjduq7jWENR54ZBSN3TjCVh27tXtalIUiJgPW//vWvr7/+erI4GVQ7UfUERFGSLpjDjSudXVjCE8YsEAQ9JiEMHOcJxBhcVLt5MpVDOKMobB9dtTYoIQIhI9CkTSqDj3rG6bh9qgIp9uHOfDx2DyTxJ5uaqTGklYfUbF+BF5BJ6pM/8Ymr3BwTkHDPMdC8Vcd67fcevYV7HXd87TlZxx3ZikjF8UwANwuEoyNRezw5ymqAxGej3pDpKOxrr73WPlJJtAbhE4h7yiA342uIx0IgCk+OIvQpj85D8OFFtsVQurSOFkSMnnXWWXbZRAJaUPOEW2CJWSPSHpJBgjVhkMjEfthjeWyUhW7oGsvIEBTOGixEfXDPgfUcbRl3AvOYtxDon0y4U5g6eR6UcHZkKyofIUthws3pO4snotopQ5w6ap6VeQhJwh7CY3i3FIszIs3tSju2XaLqkfVMe6iZZdGZYLBcDOt1IuhtGXcCwizRSM0U5iWv9Au8pgDGMCujFR4LRlIzBPTdfaxJ8/ArEzY+9pYI+YY/UwLmXTZynUcdODfwuDNATOrMI7CmEhbD4fTg/ImvXzkFIcCZw0MONM2NFztnLoglIWsUXc6qJjyNusptYnpQEsvvvGvmQcAc61PNGBjSykNqtq/AC8gk40lpyC7yQJtbkr36CXT/AmkcDwjim8SVi1cSA5E+SFiWXOTRTBRY6ocdPR1iXUji3WcsWNK6aSPi2rP0tXsqL9Qmzl1WXEGYFsqAOrVLqAzxBtwZQJKihBhchlUyyMOQ2HfinZ544gkc/J5dfm/yJC7zz7peWeaqZMVS95MSfpua5/oZFGZxNMqMjls9eW49b835MpTxa3rEFuN1creqDHWtfjM1+/VqqPJPs8NWeUjN9hV4eJmAZaXssV+wHlFUU0R/khLQC5iSognFDpR6r85r/7VnB/5GQ7WHArvbSCJGcJwT2+POVNpDAD7ctci/aveYoU0RyA0B1mvf9YrVqsJTnhNtTc3U0+zPp/9jTeSqZqoKaeUhNdtX4OFlEjun9cmKgEJlssKng2slQNQHLzPyFCM43h1p7dkbuk1WkwydzXk2mDh7PnluVM2JgI8EGsXulzodtna2O9kh8JcQgoxXgfRYiSZj0T3imHkQMLc101BIK68xu/Ln978Y9XrPHfYqW3/HnNEOOZNQnieeE16bdSQg4V5HYCpeRwJEpRPy7jnIxAh5Mu3me++9Z9NKiIAIiEAQCVR8EbOq8y5O9+SPk2ZsN3MA/xbdC2nlpfWqO/aZ8r/5PTr2yaVqN2MUWiZhPU8yvjR0IAv4CoII+Eqgdc3H1yZUuQiIgAjkm0DFl7EW2/XId7tqTwRE
oLgJKMa9uMdfvRcBERABEagrgcqlzozYgjwS7nUlp/IiIAJZEpBwzxKgDhcBERABESgyAjMmOFWVTvmaTvM/3xlXZP1Xd0VABApGQMI9hp5X4bBaOeswbrvttuPGjUs4GqxO2LVr1/LychbeZsFs1pgzxXgvJstvs9gcu1hfnGWwtcJmQoDKFAERCBQB3kLAi7f4JHt1V6CsDZYxFV/F7CFOpsS8+C5Y1skaERCBCBNQjLszfPhw3vXIIieodtQ5b43hhTIEZrtHnfWnL7744oceeoiX4/CWSt7RU1JSwkttKMN7be69995HH32Uxbw/+eQTXsfDGu28kNJ9uNIiIAIiEDQC6HVemBA0q8JhjwLcwzFOslIEIkhAHncH/T1o0CAEN+8KRb7zhnkEumeoR48ezRLURx55JI553v7IK0KtY55duKx4gz27/va3v7HX7vJUok0REAEREIEoEJBwj8Ioqg8iEEoCxS7ceXfgp59+ysvezejxwkvSY8aM8QwmjnaKGUX+888/86JT3vhlyrCL98/jhmfzyy+//OCDD3j5vOfwsG/yKtOzzz47pL24+uqr6+pW/OWXX7ij8sUXNcu95aLbnGasYs4cLxeVqY4EBI444ojbbrstwQ5lJSfAi3754uJDInkp7YkjsKLSmT4+lttO9yvi4ChDBETAZwLFHioza9YsgtTbtGljOZP+9ttv7aZJ4GunZJ8+fYhfr6ys5KVCvAbS7CKEhrf1brTRRtx3pqrrr7/+qKOO8hzO5tKaj8mnPAlexM3HJPhLtVTOL2jdfkSrVjiTxzgLpzlN2jp/6ZX71W2Nxbx9u8a2lVuB/p+BePbZZ+1LkYiDOu200+pEtUOHDlOmTGnZsmWdjgKKebwhnhXBVNyQ2W677UyFHgvzT/ORRx4By+zZs+vaNFMaHuQwR6255prdu3dnnf4ddtihrvXkvDzXI9PL4447jkA1T+UwZ0S41sDu2ZVi031tpigW6l3MJ4cNG0YXeNNCgwYNQt2XFMbnfihnflu/ckl1gyaVTdfhezxF09qVWwK5H8rc2qfa0ibgGUqzmfbRxV6w2IV7muPPK4FuuOGGe+65hzj4H3/88ayzzuIhVJ5J5fCnnnrq8ccfJwieGHd8tHim27dvP2DAAE/NN9544+DBg92Zb7zxBmE5Ngd3bNu2bRcuXMivqc1Mnaj/42vl7w0uXVhhilU1abd456uWb5B7fz+TCqwy843UJgVk7+LFi93W1q9f372ZjpEMzaJFi9IpGV9mwYIFJpOJHJ57Pnfdddcll1zitsFjYXwldc1hgNLXXjxajZZ125Nmc5yflBwxYgQz1d9//x0n9wEHHMCjHZ5nQtKsLYfF/vKXvzA1evDBBwl781QLGWi///77nMaeXbVuvvnmm7WWCW8Bzk9j/Ouvv16nWU0Yu5zDoVxn9odbOs7v9Tt8+NrIMKIIu805HMqwowi7/XYoM/61DTuBDO3n97uYP/jB+cV6/vnnLYT+/fsfeOCBdtMk8LXjlLKZ//nPf1hDhp89ctZZZ527777b7kLQs/iM3bQJpNK8lZ9ff/2V0cKFj6Tg88cff6CEcH/+73//I021aX3Gj6i6qnnVVc2qV/6r2Wy+YvyItA5PWQhJd/TRR6+xxhrMJf7+97/vtNNOPG7LEVxdeGqZmaBrt9lmG2KETDVDhw7F0/nCCy9suOGGkDnkkEMQrzwq0LFjxxYtWpx++ul005Sk19RMJsV4DpibGyY/2V/mRTx7gCSlKiyxxdhkItS3b18swR6UsdlFvr0SSJN55ZVX9ujRw+w1g3vdddchNDGYKBpOgPPOOw/nMV52ZJ8p9tNPP1EJwVFscoit0CRMrxOiQBqyQhE1c0ZtvPHGnFpUNXbsWEKw5s6dayrnL/VwT8Bu2sT999+PIG7YsCGnECeVzb/gggu6dOkCMRYvuuyyyziXzC7TNY5CszI9IJOa2eShCwoTnIMZthKbwH53j6iEXWmOi5sMR5loItsKoWKMKacNeLnvNH36dNPoK6+8wiMiYFlrrbWIMSM2w+Sjp0899VTOMbqM8uZulcmfOHEi8wHqadq0Kc+NTJ061eSb/nK7gJHl5buHH364myqjyXVqSrr/ck1xZXFK11xt6f4xVyV/0z0ghOWYhgGNj/EXhLAHaZmc86GsfOUCvnUrX74greZVKHcEcj6UuTNNNdWNgGco+QHiVwmJZCWTEikIFLvHHUW45ZZbImVMZAV31UkjNN3KhjQqDe1lM413CqwJdyWMr0Ca8LE1kMANzMfmlJWVob1o5c+GqHx5co8vETIjLyI0wx5OoiS2WVLy+sVO511SxczUb0wp94Hx6YsuuggPJUIcBUYQwmeffUaYOIYh3ydMmPDkk0+ilZFriLCvv/4aTckuEKE12YVkR7gfeuihqHMeBuCRANIoKkQ2DRHM8MMPP7z44osIL1rZf//9qdDNwW0M0pnYZYQFx3JHApFH+ApL+pgyt956K7YRqoG/kBsdiN099tjj448/xuaHH3547733ZpgwDKqUN1RJv/vuuyzoSe8+/PDD448/nucZdtxxR7Q1iwudcsop6E5mYqYwf/n885//ZOEg0+JNN91EaAETCfITojBhJKBgjsE0YO2110aVmvkMstVUYv7W1L3qjCKT+zb0FIabb775559/jue4SZMm5tYNrFCrMIc2+WxeeOGFHEJ3uP/DQDz33HOms2QydbzllluAw2TmmGOOmTRpEnLZNGr+MhasnoQIZvUkcmgFY9IcF0pyiDEe2f3YY4+xyTqq5KCheT7khBNOoHJ2MbiM3TvvvEMBNpnvbbbZZghE2uV8QPFzCJ196aWXmJuh2pnN8iGTy+fggw/Gqv/+979MhAhz4llwbnlRD/1l5sDJ8/LLL8+ZMwfhTk+R++ziw90wbotxy9VzoVEnB9ZcbasuN3NIrX8zO6rWagNSwHyDYUy0u2lo57KPNQHu9TpsXs/1BR6QMS0GM3I5lMXAK8B9tENJIsBmBs+0FKK+SHahNfmlRxihIE888UTk5rRp0+g7oof4dQPhqquuwvmHaEOGEuKCPkM0mF1IK5y1KAnchOgnlCWiKjU6ppWcCHZyyUQVjzseQQxA4vx57NKF1pWe4wQ1p/ygvJnPIKdMKSIicN8SHYQERB0S/G2P3m233YgAYROhTI8QkWbXSSedhCOceswmapgc0rhaKYZiNvlMsqnZNmQy3X95tAAtbnNwPCOazSY+V6S53YWy55lgs0kTaFm7i7HD4242GSzjhjebaH3is00ajYiLlyFmk6GkEtSz2WX+4iNHofLwMZvJUODoxePOsQhTeyzodt11V7tJwmOh2cVJRcCVLYb+7tWrl920CaYETDXNJl3j+27GjBl2LzVffvnlZtOEtbz22mt2r00wXkwk7Gb642LIMGqwQg3THMZwAlMVBrOkkq0TFc5e5gY2xyRmzpxJPjMQNs844wzImBh0W4zri9Ns8uTJJgdnOeV5LpxN+st5xZVidnE+INbtgfj7KUkUvs0xCa6p1a4sz+4km+aqNF1LUiT
02dxuivnba+47hb4zyTuQ46HkRuv1HWLfydPGJ29Te3whkOOh9MVGVZoWAc9QekRRWlUUcaFi97jzS4/mQ0zgCESv41ceOXKkeVYV6YCvjgJ8EEPIFP4iW1u1asV9fOvnw69JsDvOYPQTPlEUKlWZo0L6F6cmFxWSyNiPvxaBSxqxhTAlGMb2ix9+nMpmE0VlH1sE4HrrrYfT1OxiEzikv/nmG24s2Jo5lprJtBV6Euwi6sNmEm6BNxcbEHZkomvtLtLsspspEjyKYIcVwzbddFNTmDqxx9gZfzginokcHmJsYG9qFEx78C7bShCOKH67mTDBfUOwcwfAhmgzkbBOeu4G4PinAFqcfDzuthLmIZyQdpOEbRptTUnTI3rNZIO9TFSQ8u7ypFOMC9OhUaNGUYaGjIYmjT2E9IwfP545KjNe4yxBN3M3ww66aQKbOWG4x8JFwW0NpmrmfhQXF+S5ecLEjHOAORj3XtD9xhhuifAxNTBVYy6NhVtvvTU5nFdMoc2udu3auceL6QT53O4we/VXBHwhMGeis2yBU9bIaRn7VtRHBERABPJMQMI9BpzYmPjwGHN33owHchNvH5/44UFGIBnTVI3xhyfNIaDl0qlJ904a7Tz+t6R7j3rG6bh90r3UnNEH1Yi6JXzF6GZThxVq7ltdJjLBNsJmwvAhWyCfiQzsZEbHYw8EgSCsjampUaAgjTfaFOYmDEI/dR+Nd/yBBx6wsxrKG84E8xAvTkA/Ny6Q8twgcq97iDr31OzpoCFPzJJ5bN+oW88hKTYJ+GHiQQF3tahq4qP4MIsgrAUFzz0rusCE1oYVmTrR1iTIR/fTO2a22INkZ2ZI/hZbbIELn4nEW2+9xS0sIm2eeeYZc2Cyv24zPOeVWSTHM41JVo/yRSBDAmYF9zabOPX065khQh0mAiKQDQF99WRDz89jiUNo4NVkq9rrvKvTrL0zn/VkYnEXrk9JLJ+9pTGfdGYfHOfII/yjRB5TA8HEhFLwfCqx13i78XFmvPwfz2si9aiZxe+pmSAcQilwqSazk/LE1di9pHHf2mnDRx99ZHeRprDZxHjstLuyTPAkKF5/HMzmRbmmtmQoEs5PKMxykNzWc6t5j1X4/hG1BGLFryVKcD+ql2dSzSHGce45vNZNanCX4Z6AG1GKcSEMzH2gJ82To7jSWW3pnHPOQYUTTYRHnFmuu5gZZVS7OW2INXLv5Z4At7z4UBV+d8Q3xtSEu/9qnO5EuRA9n+IksbUxf+D5BKZJNkeJ1AS4lMxLJ+w1lbq8C3X1xgAAQABJREFU9sYI6NVLOg9EQAQKSmC1n9iCWqLG60IAXb73zc5TLHhCnLHV7rGYY2fvm7JR7VSAEx3XMgHEhI7woCeS0cSWIJqRlayygscXMUp8EQ/yEpjBW2PTNB0fLSKYaBBWPuFOBY8QoAvdwTCeeljvhQAJgqcRdjieiVRBI9oy6HieTeSpYpaUevrpp1m6xOxCO2IYMS24gVkuxpbPLEHsEzqSCk1wNpUQO5QMRcJ3b+2yyy54o4kzsWE5VIKn2SzJYqyCDD51HnjFp45+JQaJNRaZMvFMJ7sILMHRDgr6SPh+Zh1xHwUiTKJTRP8T41TXcbFVMRXBZoKkocRTpKhzHiQlfgZEPPCAzTjsGQJOpH/961943+kIg24PZy5EJucSJxgjyIO8RMXgd2d5eM407mIxzSMIjUnjVlttZY9KliCkxwTbJCugfA8B9DprQ3kytVkLAQn3WgBptwiIgL8EVlvXwt+mVHtuCXQ70Dn8306zWCjCnx987eSQn/WHJyDxjxLhgIpiERIeQDRV8lAjwh09TVwyipklXIxXPv0GqYHaCGg2T14SwuEOfvDUgxOXR1fRf0hePLssIGOXlKEkZqBukX0s74gEJJLEHM68AimPv5Zdngoz2GRtk4qKCjy+SEzzwQVOPemjQLYST8KiMe7WUeSYZz/E0BOKg8ylWmQrUpXY8U6dOnEIUTr4swnl4gEMmjZvD3BXlUGaOx68RIzpEIElTH6ooU7j4m6R530JwmFOxR0DplI48lHPdIF1flDhKHI+jCARVgwiHeHUsoczeaN1RDlzEh4q5WSgMJMB1uFB7rPaD6ff+uuvT0i9PSRZghsjPOFtnxBIVkz5IpAVAZ4sN8K97aqHWLKqUAeLgAiIQB0JlHAHv46HqHi2BFgWA8cqj1GbpwzRPUgWltf47bff0Gq1Psi4WvOsC0m8+8LpTpM2sbj2LCJkVqs28Bv4jJGGfAJlKaEyDC7DigB1G/bVV1/xFCZPatpHAtx7lc6eAMFI3ItgRZr4qtD03N+o65VlrkoWPE0xsYxvK1w5nK7cA8Fmpt+eMzZcHUltbS6Hct5vzj82cUrLnEumOPVreeI8tVXamwGBXA5lBs3rkNwR8AylRxTlrp1o1qRQmZCPK0q90w4h70P0zSegiKc2kY+4oqPf20L0EHnN+k6FaDnEbRKJ9Oijj9IBFnXlsYcQ9yRvpht3e6uNpdrzhlwNiYAIeAis5hf07NOmCOSHAKHhuKI9H16mk5/W89MKQT5S7f6hJtDILFrqXxOqWQT0ZKrOAREQgYITkMe94EMgAxy75qCbhed9n+5dpAmJ9uRoUwREQAT8JaAnU/3lq9pFQARqJyDhXjsjlfCbQOo1B/1uXfWLgAiIQFoE/hTuejI1LVoqJAIi4AcBhcr4QVV1ioAIiIAIRIvAwhnOAl6dUeK0+fN1y9HqnnojAiIQDgIS7kEZJ/N2noRv8AmKibJDBEJIQAtnhXDQAmlyxVcxs1p2cRo2CaR9MkoERKAoCChUJijDzLIYrMg2depUltZmhYcUb9kMisWyI44A865ly5ax/mCEF9eL63SgM1DtvDmLqynCqzoGegCiZFzFF7HetOsRpT6pLyIgAqEjIOEelCFD6rHUNO/6QbsHxSbZUUcCyMTFixeXl5dr3lVHcj4WZyzWWWcd3hLqYxvhrBomvOIK2wUnrQHUq5fSwqRCIiAC/hKQcPeXb51qx9HOm1BYXJnXT9bpQBUOCAFeKvH+++/zyk/5dwMyIpjBWEiYJhwOsPTu3TvhLmUmIKAlZRJAUZYIiEC+CUi455t46vbMPX3JvtSUArsXJcS8i3ffagQDO0YyTAQyIbB4jjN3UuzAdlpSJhN+OkYERCBXBCTcc0VS9YiACIhAmAjwSAaxeVjcrl07PZVRy8hN+zpWoEVHp3zNWkpqtwiIgAj4SUCryvhJV3WLgAiIQFAJcHeId5/xIRFUGwNjl+JkAjMUMkQEipyAhHuRnwDqvgiIgAiIQG0E9Oql2ghpvwiIQH4ISLjnh7NaEQEREAERCC0Bs4h7u56h7YAMFwERiAgBCfeIDKS6IQIiIAIi4AuBZX84s76P1axF3H3hq0pFQATqQEDCvQ6wVFQEREAERKDoCEwb7zjVTtN2TpPWRdd3dVgERCBgBCTcAzYgMkcEREAERCBQBPRkaqCGQ8aIQHETkHAv7vFX70VABERABFITMMK9rVZwT41Je0VABPJBQOu454Oy2hABERCBoBHgfW
E77bQTVunNsrUMzbQvYwUU4F4LJu0WARHIBwEJ93xQVhsiIAIiEDQC6PWdd945aFYFzp7Kpc6Mb2JWSbgHbmxkkAgUIwGFyhTjqKvPIiACIiACaRGYMcGpqnTK13Kar5NWeRUSAREQAT8JyOPuJ13VLQIiIAJBJVBdXT1z5kysa9WqVUlJSVDNLLRd9tVLQlTooVD7IiACEJDHXaeBCIiACBQjgeXLl99b8yFRjP1Ps89aUiZNUComAiKQFwIS7nnBrEZEQAREQATCSODPd6b2CKPtslkERCB6BCTcozem6pEIiIAIiEAuCKyodKbz9iWeTO2Zi+pUhwiIgAhkS0DCPVuCOl4EREAERCCaBGZ971QucRo0ddbsFM0OqlciIAJhIyDhHrYRk70iIAIiIAL5IfDnq5e6O6X6rcwPcbUiAiJQCwF9GdUCSLtFQAREQASKlICeTC3SgVe3RSC4BCTcgzs2skwEREAERKCQBKZ9FWtdr14q5BiobREQgdUIaB331XBoQwREQASKhABvTu3VqxedJVEkXa5bN6uqHC0pUzdkKi0CIuA7AQl33xGrAREQAREIIAH0+p577hlAw4Ji0pyJzrIFTlkjp+WGQTFJdoiACBQ9AYXKFP0pIAAiIAIiIALxBCq+iOW12cSpJw9XPB3liIAIFIaAvo8Kw12tioAIiEBhCVRXV8+bNw8bmjdvXlJSUlhjgti64mSCOCqySQSKnYA87sV+Bqj/IiACxUlg+fLld9Z8SBQngVp6rSVlagGk3SIgAgUgIOFeAOhqUgREQAREINAEqqsdCfdAj5CME4EiJSDhXqQDr26LgAiIgAgkJTDvN2fxbKe0zGndLWkZ7RABERCBvBOQcM87cjUoAiIgAiIQcALG3d5qY6esYcAtlXkiIAJFRUDCvaiGW50VAREQARFIg4BevZQGJBURARHIPwEJ9/wzV4siIAIiIALBJqAA92CPj6wTgaIlIOFetEOvjouACIiACCQhIOGeBIyyRUAECktA67gXlr9aFwEREIHCECgtLd1qq61om0RhLAhsqwumOwsqHKck9vYlfURABEQgSAQk3IM0GrJFBERABPJFoKysbL/99stXa6FqxwS4t+ziNGwSKrtlrAiIQPQJyNES/TFWD0VABERABOpAQHEydYCloiIgAnklII97XnGrMREQAREICIHq6upFixZhTOPGjUtKSgJiVSDMkHAPxDDICBEQgQQE5HFPAEVZIiACIhB5AsuXL7+15kMi8p2tWweNcG+7Wd2OUmkREAER8J+AhLv/jNWCCIiACIhAWAgsnuPMnRQztp2Ee1jGTHaKQBERkHAvosFWV0VABERABGohUPFVrECLjk75mrWU1G4REAERyDsBCfe8I1eDIiACIiACgSWgd6YGdmhkmAiIAAv4CoIIiIAIiIAIiMCfBPRkqk4FERCBABOQcA/w4Mg0ERABERCBPBOQcM8zcDUnAiJQFwIS7nWhpbIiIAIiIAIRJrB0oTPrh1j/2vWIcC/VNREQgfAS0Dru4R07WS4CIiACmRMoLS3t0SMmT0lkXkvEjpw+3nGqnabtnCatI9YzdUcERCAaBCTcozGO6oUIiIAI1I1AWVnZQQcdVLdjIl/aLCkjd3vkB1odFIHQEpCjJbRDJ8NFQAREQARyS0AB7rnlqdpEQARyTUAe91wTVX0iIAIiEAYC1dXV5p2p9evXLykpCYPJ/tuod6b6z1gtiIAIZENAHvds6OlYERABEQgrAVT7jTUfI9/D2o0c2l251Jn5Taw+hcrkkKqqEgERyCkBCfec4lRlIiACIiACISUwY4JTVemUr+U0XyekPZDZIiACkScg4R75IVYHRUAEREAE0iBgA9wVOJQGLRURAREoCAEJ94JgV6MiIAIiIAIBI/CncN8sYGbJHBEQARFYRUDCfRULpURABERABIqXgPW4Fy8C9VwERCDoBCTcgz5Csk8EREAERMB3Aisqnen/i7XSrqfvbakBERABEciUgIR7jNyQIUPWW2+9Ro0abbvttuPGjUsI84477ujatWt5efm66657zjnnLFmyxBTjQFZSc39OO+20hDUoUwREQAREIKAEZn3vVC5xGjR11uwUUAtllgiIgAg4jtZxd4YPH37uuefed999qHbU+V577fXdd9+1br3a+66feOKJiy+++KGHHtp+++2///77gQMHotRvv/12TqGPP/54xYoV5lwaP378Hnvscdhhh+nUEgEREIGAEygtLe3WrRtGkgi4qfkwzwa4i0Y+cKsNERCBDAlIuDvo70GDBh177LEgRL6/8sorCHRkupvo6NGje/fufeSRR5KJi71fv35jx441BVq1amVL3nTTTZ07d95pp51sjhIiIAIiEEwCZWVl8jKsGhoj3NvqydRVSJQSAREIIIFiF+7Lli379NNPL7nkEjM2eJ523333MWPGeIYKR/tjjz1GFM0222zz888/v/rqq8ccc4ynDFVRBuc9znjPLjaX1nxM/vz580nw0hPz3hP33/gDlRMiAhrKEA1WalM1lKn5hGhvmkNZb+rn3HeobL0pr5MNUe+KytQ0h7KomIS0s56hNJsh7Uv+zS524T5r1iwCXdq0aWPRk/7222/tpknga6dknz59eEl4ZWXlySeffOmll3rKjBgxYu7cuUTRePLNJi8oHDx4sHvXG2+80bhxY5vz5ptv2rQSoSagoQz18LmN11C6aYQ6XctQVlftN+ULhPv7P8xb8Nuroe5p5I2vZSgj3/8IddAO5aJFiyLULd+7UuzCPU3A77333g033HDPPfcQB//jjz+eddZZ11577RVXXOE+fOjQofvss0/79u3dmTaNUx9nvNnE484TrnvuuWezZs3IYa7J6UtwfP369W15JcJIQEMZxlFLaHMxDCU3CW+99Va6f/755zdo0CAhhwhkpjWUs38q+2JJdVmjHQ4+zinVz2JAhz2toQyo7TJrNQKeoTRhCKuV0EZyAsX+DdWyZct69epNnz7dIiLdtm1bu2kSaHRiY0444QQ2u3fv/scff5x44omXXXaZfahr0qRJb7311nPPPec50G42rPnYTRLIdLdS92y6SyodLgIaynCNVwproz2U3D80fY92N9Pq48zYQpAlbTat37A8xfmgXUEgUAynaxA458EGO5Qk8tBcZJoo9sUE8DNtueWWb7/9thnRqqoq0r169fIMMPdxrEZnF1qfv/Znj/TDDz/MQjT77bef50BtioAIiIAIBJ2AXVIm6IbKPhEQgWInUOwed8afCJYBAwZstdVWPHjKcpB4080KM/379+/QoQOx6ZQ54IADWHxm8803N6EyOODJMfKdvch9hDuVsEpDsZ9Q6r8IiIAIhI6A3pkauiGTwSJQrAQkNJ2+ffvOnDnzyiuvnDZtWs+ePUeOHGmeVZ08ebL1sl9++eWsFcPfKVOmsP4jqv3666+35wxBMhQ+7rjjbI4SIiACIiAC4SBAyJCEeziGSlaKgAjoBUw158DpNR/P6cADqTYHV/pVNR+b407wmKk7bMa9S2kREAEREIFAE5j3m
7N4TuyZ1Nax11HpIwIiIAJBJlDsMe5BHhvZJgIiIAIi4DsB425vtbFT1tD3ttSACIiACGRHQKEy2fHT0SIgAiIQTgKEAnbp0gXbbUxgOPuRtdWKk8kaoSoQARHIGwEJ97yhVkMiIAIiECACRADyarkAGVQoUyTcC0Ve7YqACNSdgEJl6s5MR4iACIiACESGwLSvYl1p1yMyHVJHREAEIkxAwj3Cg6uuiYAIiIAIpCSwYLqzoIKXLzltN01ZTjtFQAREIBAEFCoTiGGQESIgAiKQZwLLli279dZbafT888/nVXR5bj0ozRl3e8suToM1gmKS7BABERCB5AQk3JOz0R4REAERiDSB5cuXR7p/aXSu4otYIcXJpIFKRURABIJAQKEyQRgF2SACIiACIlAIAnoytRDU1aYIiEDGBCTcM0anA0VABERABEJOoEJPpoZ8BGW+CBQZAQn3IhtwdVcEREAERMAQ4IWpcyfFkm27C4kIiIAIhIKAhHsohklGioAIiIAI5JqAcbe36OiUr5nrqlWfCIiACPhCQMLdF6yqVAREQAREIOgEFOAe9BGSfSIgAl4CWlXGS0TbIiACIlAMBEpKSjp27EhPSRRDfxP0UcI9ARRliYAIBJqAhHugh0fGiYAIiIBPBOrXrz9w4ECfKg9HtX++M7VnOKyVlSIgAiLgOAqV0VkgAiIgAiJQfASWLnRm/RDrdrvNiq/z6rEIiEBYCUi4h3XkZLcIiIAIiEDmBKaPd5xqp2k7p0nrzCvRkSIgAiKQXwIKlckvb7UmAiIgAsEgsGzZsjvvvBNbzjrrrAYNGgTDqDxaoQD3PMJWUyIgArkiIOGeK5KqRwREQARCRmDRokUhsziH5kq45xCmqhIBEcgXAYXK5Iu02hEBERABEQgOAb0zNThjIUtEQATSJiDhnjYqFRQBERABEYgGgeVLnJnfxLrSVk+mRmNE1QsRKBYCEu7FMtLqpwiIgAiIwJ8EZkxwqiqd8rWc5uuIiQiIgAiEiICEe4gGS6aKgAiIgAjkgoANcC/al0/lgqLqEAERyD8BCff8M1eLIiACIiACBSXw56uXehTUCDUuAiIgAnUmoFVl6oxMB4iACIhABAiUlJS0b9+ejpCIQHfq1oU/Pe4KcK8bNpUWAREoOAEJ94IPgQwQAREQgQIQqF+//qBBgwrQcMGbXLHcmcbbl3hnas+C2yIDREAERKBOBBQqUydcKiwCIiACIhByArO+d1YsdRo0ddbsFPKeyHwREIGiIyDhXnRDrg6LgAiIQFETsHEypfoFLOoTQZ0XgTASUKhMGEdNNouACIhAtgSWL18+ZMgQajnttNMIm8m2uhAdr1cvhWiwZKoIiMDqBCTcV+ehLREQAREoDgLV1dXz5s2jrySKo8cre2k87nr10koe+l8ERCBEBHSjMESDJVNFQAREQASyI1BV5WgtyOwQ6mgREIECEpBwLyB8NS0CIiACIpBfArN/dpYtdMoaOS03zG/Dak0EREAEckBAwj0HEFWFCIiACIhAOAhUfBGzs82mTj1FioZjxGSlCIiAm4CEu5uG0iIgAiIgApEmoDiZSA+vOicCkScg4R75IVYHRUAEREAEVhKwa0GuzND/IiACIhAiArpXGKLBkqkiIAIikDMCJSUlrVq1ojoSOas04BWxfs6fwr1HwC2VeSIgAiKQkICEe0IsyhQBERCBiBNg7fZTTz014p30dG/er87iOU5pmdO6m2ePNkVABEQgFAQUKhOKYZKRIiACIiACWRMw7vbWGztlDbOuSxWIgAiIQAEISLgXALqaFAEREAERKAAB887UtoqTKQB7NSkCIpATAgqVyQlGVSICIiACISOwfPnyBx54AKMHDRpE2EzIrM/MXAW4Z8ZNR4mACASGgIR7YIZChoiACIhAHglUV1fPnDmTBknksdmCNiXhXlD8alwERCB7AgqVyZ6hahABERABEQg8gQXTnIXTWETHabtp4G2VgSIgAiKQmICEe2IuyhUBERABEYgUARPg3nJDp8EakeqXOiMCIlBMBCTci2m01VcREAERKFoC076Mdb3dZkULQB0XARGIAAEJ9wgMorogAiIgAiJQGwEFuNdGSPtFQASCT0DCPfhjJAtFQAREQASyJiDhnjVCVSACIlBwAlpVpuBDIANEQAREoAAESkpKmjdvTsMkCtB8npvkhalzJ8fabKtQmTyjV3MiIAK5JCDhnkuaqksEREAEwkKAtdvPPvvssFibpZ0l07+O1dCio1PeIsuqdLgIiIAIFJCAQmUKCF9Ni4AIiIAI5INAybSvYs200ztT80FbbYiACPhHQMLdP7aqWQREQAREIBAEJNwDMQwyQgREIGsCCpXJGqEqEAEREIEQEli+fPkjjzyC4QMHDiRsJoQ9qIPJK4V7zzoco6IiIAIiEDwCEu7BGxNZJAIiIAL+E6iurp46dSrtkPC/tUK2UG/FEuf3n2IWaBH3Qo6D2hYBEcgBAYXK5ACiqhABERABEQgsgeaLJ5c41U7Tdk6T1oE1UoaJgAiIQDoEJNzToaQyIiACIiACYSXQfPEvMdP1ZGpYB1B2i4AIrCIg4b6KhVIiIAIiIALRI9Bi0aRYpyTcoze06pEIFB8BCffiG3P1WAREQASKiUDzRb/EuivhXkyDrr6KQFQJSLhHdWTVLxEQAREQAcepXNJ0yZQYCAl3nQ4iIALhJ6BVZcI/htHqwYqq6nETZ89YsKR100bbdFqrXmkuX8bud+VjJ87+dFbJ2hNn99qgdQ4t99vsMAL3m4lPQ8nF6p/lGdTcuHHjdL8/qlY4k0Y7C6c7Tdo4Hbd3Suule2Ct5fyrmaarVpR++WSpU1XdoGlJk7a12qICIiACIhBwAhLuAR+g4jJv5PiKwS9NqJi3xHS7XfNGVx3Qbe9N2+WEQr4qr/fvHz7JoeX5MjvGOIdmU5t/lvtX8+pm53goV6+crVwCz4BJgwYNLrjggpgdtX4mvOiMvMiZH1s7MvZp1t7Z+2an24FmK6u//tWMWTWV16sxu2TZAufO7jkzO6s+62AREAERyJyAQmUyZ6cjc0sA5XHKY59Z1U7l0+YtIYf87BsKaeUhNZvx8s9y/2r21WxfK/eVSUz+PtV/lWqnJ/MrYjnkZ/nxr2YM87XyLDuuw0VABEQgUwLyuGdKTsfllAB3+fG1e14DYzbPe/rLTybNKS3JPGamqrp62NjJoas8pGZzXvhnuX81+2q2r5UnY8IFwzW1R7e2WUVtEceCr51F0Ff71GyOOMX59SOnJFPvT3WV8+mjvtSMqUkrL3FGXuxstF8uQ31WI6MNERABEfCXQEnk35nnL7+Map8/f37z5s3nzZvXrFkzKuDF46+++uq+++4b+beOp6A15qff+z3wUYoC2iUCIpABgWGDtuvVee2EB/LN8/jjj7PrqKOOSvrlM3GU8+j+CQ8PceaAl51OO4TY/mI1Xb+VkRl5z1B6RFFkuulTR+Rx9wmsqq0bAZ5GTXHArhu13qB1kxQFUu/6ccbCd76dkaxMYCsPqdlw9s9y/2r21WxfK0/NJMWV
hddm0qRJ2JbKfcPTqCk+XfZyWm2YYn+qXTO/d354PWmBbGqm0tSVp+5UUpu0QwREQAQKT0DCvfBjIAsgwBoyKTgM2mH9ZF7DFEfZXbjzUwj3wFYeUrPB7p/l/tXsq9m+Vp6aSeory14jSROsIZPis/0Zmbuu8eWnEO7Z1IzBqStP3akU/dUuERABESg0gUzDEwttt9qPGAFWfmRJk/gwdnLIZ282/Q1p5SE1m5Hyz3L/avbVbF8r95VJbOVH1pBxEl2azTrE9mb88a9mTPK18oy7rANFQAREIGsCEu4xhEOGDFlvvfUaNWq07bbbjhs3LiHVO+64o2vXruXl5euuu+4555yzZMmq0I4pU6YcffTRa6+9Nnu7d+/+ySefJKxBmSkI8PwcKz9SwC0QTJr8rJ6uc5yQVh5SsxlE/yz3r2Zfzfa1cl+ZxB7iZOXH2Cfu0tz7pqwe8fSvZoz1tfIaHPojAiIgAgUhIOHuDB8+/Nxzz73qqqs+++yzHj167LXXXjNmeOOhn3jiiYsvvpgy33zzzdChQznk0ksvNQM2Z86c3r1782jXa6+9NmHChNtuu23NNdcsyFiGvVHWa7/36C1aNmloO9K2eSNycrKOu6mcCsNVeUjNBrJ/lvtXs69m+1q5r0xi67Uf/m+nmet1Cvjgycl+HXf/aga3r5VTvz4iIAIiUAgCWlXGwcu+9dZb33333fCvqqrCoX7GGWcg093DcfrppyPZ3377bZN53nnnjR079oMPPmCTkh9++OGoUaPc5VOnPQ9Qex6vTn1s5Pd+8P3Mox8a16ZZwzv6bk4MAN7EHHY5g1dLpt86lY/5ccYbo8buucO2enOq4eYfcP9qxnL/htJU7tOrauvKZNmyZTfeeCMmXXLJJbyMyQxZ0r/+vd/Uv5rpTNWKyp/f/2LU6z132Kts/R2zukWQFI125ImAfivzBNr/ZjxD6RFF/rcf7hai83Aq7vDjjjuuY8eOdRoQfro+/fRTfrfMUaWlpbvvvvuYMWM8lWy//faPPfYYUTTbbLPNzz//zOqNxxxzjCnz4osv4qQ/7LDD/vvf/3bo0OHUU08dNGiQ53Btpk+gYn4sBmnDNk2zeRo1WXNMA/yo1jRH5dt2Wuv3b6r5m9v5ht9m+8rEp8r9ZuLTUHKq+Gd5BjUnXQUy/hIi+MSnJRT9q5lelNar7thnyv/m9+jYR6o9flSVIwIiEDoC0RHuL7zwwvXXX7/TTjsdf/zxhx56aMOGqyIuUozKrFmzVqxY0abNqpUTSH/77beeQ4488khK9unTh3XTKisrTz75ZBsqg46/9957CbYh5+OPPz7zzDPxXQ0YMMBTw9Kaj8lkckmCGScfk7B/TYFi/vvr7D/ofrtmDQ2ccKFwD2i4LJe1HgLFMJQlJSUXXHCB6XgYLzfPkCXbLIahTNb3iOVrKCMzoJ6hjPD3jx9DFqlQmc8///zhhx8eNmwY2vqII47AAU8MTGpqU6dOxU0+evToXr16mZIXXnghvnMiYdwHvvfee1R43XXXEVfz448/nnXWWbjVr7jiCsog07faaitqMOUR7sj3eJ/91VdfPXjwYHedxM03btzYnaM0BIb9VPrRjNJ9112x1zqelzUKjwiIgAiIgAiIQNQILFq0CPeofStl1LqX6/5Ex+MOmc1rPjwe+tJLL6HgeWZ0o402wgE/cOBA3lSaEF3Lli3r1as3ffqql4yQbtu2racwGp3YmBNOOIF81o35448/TjzxxMsuu4zQmnbt2nXrFlsOxXw23njjZ599duXWqv+JxsErb7bxuBNJv+eee9o3p7755pt77LFHHW5br6o4aqmnHvnUmfH7Tltvtu/mHULXN9wGGsrQjVpCgzWUCbGEMVNDGcZRS2izhjIhljBmeobShCGEsSMFsTlSwt0QJJqFc4LgdRIs8MJTp8juBx54oG/fvvGI8ZdvueWWPHV60EEHsZeHU0nzKKqnJNNBNLrNROuTNq8bZHrw3Xff2V3ff/99wjh7Qnc80TvIdLdS92zaCostYWLc1127iRtOuCBoKMM1XimsjfZQcmfyqaeeovuHH354WVkEfwvcIxvtoXT3NPJpDWVkhtgOJYnIdCoPHYnUlzWPmZpQGSRy//79WZ19gw02AOJdd91FBEtC4c5eHOGEpBPuwoOnLNaON/3YY48lnxqIojGrLhxwwAG33347Dn0TKsNMgBwj31nTnUdXb7jhBn78eHr1XzWfPIxcJJtgLjR17mK61qFFeSQ7qE6JQHAI4Kf44YcfsIdEcKySJSIgAiIgAikIREe4E8HCQ6XEn7DOulXVpuf9+vUjKj0ZBQT9zJkzr7zyymnTpvXs2XPkyJHmWdXJkydbL/vll1/Og1z85V1LrVq1on4ehDUVEkb//PPPEwlzzTXXdOrUCel/1FFHJWtL+akJzFm0fMnymIZwL7ie+hDtFQEREAEREAEREIEiIRAd4Y7Dm6dR8ZHHjxyB7KldSsTGxIfH8ECqrYr7yCw3ycfmuBP713zcOUpnRsC423kHU8OyWDCSPiIgAiIgAiIgAiIgApZAdIS7WePFdkyJMBJYGSez6v2mYeyFbBYBERABERABERABPwiseuDSj9rzWSdrt998883uFm+55Rbei+TOUTrgBIxwb68A94CPk8wTAREQAREQAREoBIHoCPf3339/3333dTPcZ599yHTnKB1wAlPnxV6bKuEe8GGSeSIgAiIgAiIgAgUhEB3hvnDhQtZ2dENkgSEtDuoGEvz0lJolZSTcgz9SslAEREAEREAERCD/BKIT486qMsOHD2dxGAvxySefdL8ayeYrEVgCinEP7NDIsOgRwNOR7IH76HVWPRIBERCBaBCIjnDn4dRDDjnkp59+2nXXXRkb3qM0bNiwp59+OhrjVCS9UIx7kQy0uikCIiACIiACIpABgegId9ZWHzFiBC9CeuaZZ8rLyzfbbLO33nprp512ygCKDikIgWWVVTMWLKVphcoUhL8aFQEREAEREAERCDiB6Ah3QO9X8wk4cZmXjMD0+Uuqq50GZaVrr7HaswrJyitfBEQgGwKVlZW8PI4aDj74YF5VkU1VOlYEREAERCA/BKLzcGp+eKkV/wj8GSfTvBEvqfWvFdUsAiJgCPBaugk1n9TvpxMuERABERCB4BCIjpdlxYoV//jHP5566qnJkycvW7bMIp49e7ZNKxFkAlPnLcY8xckEeYxkmwiIgAiIgAiIQAEJRMfjPnjw4Ntvv71v377z5s0799xzeVC1tLT06quvLiBcNV0nAlPnahH3OgFTYREQAREQAREQgeIiEB3h/vjjjz/wwAPnnXcewZr9+vV78MEHWRryo48+Kq7xDHNvtYh7mEdPtouACIiACIiACPhOIDrCfdq0aSzlDrAmTZrgdCex//77v/LKK74jVAM5IqBF3HMEUtWIgAiIgAiIgAhEk0B0hPs666xTUVHBKHXu3PmNN94g8fHHHzds2DCa4xbFXmkR9yiOqvokAiIgAiIgAiKQMwLREe6saMZLlwBzxhln8DKmLl269O/f/7jjjssZKlX
kM4EKxbj7TFjVi4AIiIAIiIAIhJpAdFaVuemmm8xI8Hxqx44dR48ejXbnrUyhHp7iMX7+kuULllbS3/bNy4un1+qpCBSQQP369S+55BIMIFFAM9S0CIiACIhA+gQiItyXL19+0kkn4Wjv1KkTnd+u5pM+BZUsOAETJ7Nm4/rlDeoV3BgZIALFQIAXJjRooJedFcNQq48iIALRIRCRUBk8Rs8++2x0hqX4eqIA9+Ibc/VYBERABERABESgbgQiItzp9EEHHTRixIi69V6lA0NgigLcAzMWMqRICFRWVvKdyYdEkXRZ3RQBERCBsBOISKgMw0BE+zXXXPPhhx9uueWWa6yxhh2YM88806aVCCyBlWtBKsA9sEMkw6JGoKqq6ssvv6RX++67b9T6pv6IgAiIQEQJREe4Dx06tEWLFp/WfOxgEcQp4W5pBDmxMlSmUZCNlG0iIAIiIAIiIAIiUEAC0RHuEydOLCBHNZ0lgZXCXR73LEHqcBEQAREQAREQgcgSiE6Me2SHqDg6NlUx7sUx0OqlCIiACIiACIhAxgSi43FP9q6lhx56KGM6OjA/BFZUVU+bv4S2tIh7foCrFREQAREQAREQgTASiI5wnzNnjh0AlnUfP3783Llzd911V5upRGAJzFiwBO1eVlrSqmnDwBopw0RABERABERABESgsASiI9yff/55N0oWTDjllFM6d+7szlQ6mARMgHvb5o3qlZYE00JZJQIiIAIiIAIiIAIFJxAd4e5BWVpaeu655+68884XXnihZ5c2g0ZAi7gHbURkTzEQ4L11559/Pj0lUQz9VR9FQAREIAIEIivcGZuffvpJLxYJxTmqRdxDMUwyMmIEWC3X/cqLiPVO3REBERCBSBKIjnDHv25HqLq6uqKi4pVXXhkwYIDNVCKwBFauBalF3AM7RDJMBERABERABESg8ASiI9w///xzi5M4mVatWt12223JlpqxJZUIAoGVwl2LuAdhNGRDsRDghuTrr79Ob/faa6+ysuj8FhTL+KmfIiACRUkgOl/W7777blGOYBQ6rUXcozCK6kPYCPAE/yeffILVe+yxR9hsl70iIAIiUKQEovMCJt6c+sMPP7iHkc1ffvnFnaN0MAlMnbcYw7SIezBHR1aJgAiIgAiIgAgEhEB0hPvAgQNHjx7txjp27Fgy3TlKB5DAH0sr5y5ajmHtWyjGPYDjI5NEQAREQAREQASCQiA6wp0Y9969e7u5brfddl988YU7R+kAEqiocbc3bVTWtJHWpAvg+MgkERABERABERCBoBCIjnBnabMFCxa4uc6bN2/FihXuHKUDSMAs4t6hhZ5MDeDgyCQREAEREAEREIEAEYiOcN9xxx1vvPFGq9RJsNmnT58AwZYpiQhoSZlEVJQnAiIgAiIgAiIgAl4C0VlV5uabb0a7d+3adYcddqCXo0aNmj9//jvvvOPtsbYDRmClcFeAe8AGRuaIgAiIgAiIgAgEjEB0hHu3bt2++uqru++++8svvywvL+/fv//pp5++1lprBQy4zPESmDK3ZkkZhcp4wWhbBPwlUL9+/bPOOos2SPjbkmoXAREQARHIEYHoCHeAtG/f/oYbbsgRGVWTJwIVc5fQkmLc84RbzYjASgI8F9SiRYuVW/pfBERABEQgBASiE+P+8MMPP/30027kbD766KPuHKUDSMAs4t6uuR5ODeDgyCQREAEREAEREIEAEYiOcOdR1JYtW7rRtm7dWg54N5AApquqqo3HXYu4B3B0ZFK0CfAE/xs1H/tMf7T7q96JgAiIQAQIREe4T548uVOnTu4h6dixI5nuHKWDRmDWH0uXragqLXHaNNPDqUEbHNkTcQLo9TE1Hwn3iI+0uicCIhAhAtER7vjXeTjVPTQ8pbr22mu7c5QOGoGpNQHuqPb69aJzKgYNsuwRAREQAREQARGIBoHoqKV+/fqdeeaZ7777Lt4jPiwEyYIJRxxxRDTGKaq9WLkWpALcozrC6pcIiIAIiIAIiEDOCERnVZlrr732l19+2W233crKYp2qqqpiRcjrr78+Z6hUkQ8EJNx9gKoqRUAEREAEREAEokkgOsK9QYMGw4cPv+6667744gvWce/evTsx7tEctAj1auUi7gpwj9CgqisiIAIiIAIiIAL+EIiOcDd8utR8SPPa1HvvvXfo0KGffPKJP+hUaw4IaBH3HEBUFSIgAiIgAiIgAsVBIGrCnVEjzP2hhx567rnnmjdvfvDBBxfHOIa1l1rEPawjJ7tFQAREQAREQATyTiA6wn3KlCmPPPIIr2GaO3funDlznnjiicMPP5xXA+YdqRqsA4GVMe4KlakDNBUVgZwQqF+//imnnEJVJHJSoSoRAREQARHwm0AUVpV59tln9913365duxLdftttt02dOrW0tJQYd6l2v8+eLOtfsnzFrIXLqKRDC60qkyVLHS4CdSbANySr6PLRV2Wd2ekAERABESgQgSh43Pv27XvRRRfxZGrTpk0LhFHNZkKgYt4SDmvcoF7zcjn8MgGoY0RABERABERABIqKQBQ87scff/yQIUP23nvv++67jyCZohq/UHd2ZZxMuRx+oR5HGR9SArzv4r2aD4mQdkFmi4AIiECxEYiCcL///vsrKipOPPHEYcOGtWvX7q9//Wt1dTXruBfbWIauvyvXglScTOiGTgZHgQB6/b81Hwn3KAyn+iACIlAcBKIg3BkpFm4fMGAAv0Fff/31Jpts0qZNm969ex955JGsLVMc4xjKXhqPe4cWejI1lMMno0VABERABERABPJMICLC3VJjGfcbbrjh119/feyxxxYtWtSvXz+7S4mgETCLuLdrLo970EZG9oiACIiACIiACASRQBQeTo3nyqoyB9R8ZsyYEb9XOQEhYBZxb68lZQIyHjJDBERABERABEQg2ASi5nH30GalM0+ONoNDYGWMu0JlgjMmskQEREAEREAERCC4BCIu3IMLvugt4wHilTHuCpUp+rNBAERABERABERABNIgIOGeBiQV8YHAnEXLlyyPrfzTtrk87j7wVZUiIAIiIAIiIAKRIxDNGPfIDVMEO2Tc7a2aNmxYVi+C3VOXRCDwBMrKyk444QTMJBF4Y2WgCIiACIhAjEB0PO7rr7/+77//7h7VuXPnkunOUTo4BFYGuCtOJjhjIkuKiwAP8Xeo+ZAorp6rtyIgAiIQWgLR+b7+5ZdfPK8RWbp06ZQpU0I7NBE3vGLuYnqoRdwjPszqngiIgAiIgAiIQO4IROEO6YsvvmiAvP76682bNzdpRPzbb7+93nrr5Y6VasolganzllCdFnHPJVPVJQJ1IcCX5EcffcQR2223Xb16ilirCzuVFQEREIECEYiCcD/ooIOgV1JSwstTLcb69euj2m+77Tabo0SgCChUJlDDIWOKkADC/a233qLjW2+9tYR7EZ4A6rIIiEAYCURBuFdVxRYn6dSp08cff9yyZcswDkMR2rxyLUgtKVOEg68ui4AIiIAIiIAIZEIgOjHuEydOdKt2nkxNn8eQIUNwzzdq1GjbbbcdN25cwgPvuOOOrl
27lpeXr7vuuuecc86SJbFIDz5XX301zn772WijjUy+/qYmYIS7XpuampL2ioAIiIAIiIAIiIAlEB3hfvPNNw8fPtx07LDDDltrrf+3d+dhUlRnvMeZlRm2GVRkF9wVZRMEEYwmsoiGgCsKV8QoBJFHkAcTQIEgAsYFd8RrULxBBTRREx8XiAkaAwSFYFDBB9xQGDYJ2wzDrPfHFFNpexZm6eWcU9/+Q6urq0695/N2D++cOXX6OK2X8Mknn/hdrWhDZ40fP37atGlr167t2LFjv379du7cGXbwSy+9NHHiRB2zYcOG+fPn65TJkyf7x5xzzjlZpY8PP/zQ389GRQJ5BUU7DxzWqxTuFRGxHwEEEEAAAQQQCBNwp3CfN2+exsLVvWXLlmni5jvvvNO/f/+77rorrMNln86ZM2fEiBE333xzu3bt1Ei9evWee+65sMNWrFjRs2fPIUOGaGC+b9++N9xwQ+jAvFZBblb6CB31D2uEp77Ajv25xcV1UpMTj6+f6u9kAwEEEEAAAQQQQKASARfmuHvd2759u1e4v/nmm9ddd53KaxXZmvpSSef1Ul5e3po1ayZNmuQdpvWMe/fuvXLlyrCzLrzwwoULF6pY79at21dfffXWW2/deOON/jGbNm1q0aKFZtr06NFj9uzZJ510kv+Sv6G1KfXwnu7fv18b+SUPb8P/r3eA8//9dvcB9bFFRlpBQYFLnVVK1R3vvy71K4B9CUIq/TeqNjTZz9UsByGVruYurF+kMgzE3qdhqfSe2tudGEfuTuHeuHHj7777TrW7xtrvu+8+ORYXF4et7F4Wd/fu3TqmadOm/kva3rhxo//U29BYu47s1auX2lStOWrUKH+qjH43WLBggaa/a7LM9OnTL7rook8//bRhw4ZhLaig16uhO5cuXarRfX+P/lDgbzu/8dEuVQlJqQUH9SuQe50NVCrdS19oj9xOpf/jUQvpOr+qjNupDH3TOr9NKp1JsZ/KnJwcZzoVg464U7hfddVVKq9PP/10fX+qJsnI7t///vdpp50WEcTly5fPmjVr7ty5KtM3b948duzYGTNmTJkyRY1719JGhw4d9GqbNm2WLFlyyy23hF1Xg/qaSe/t1Ii7fsHQ3wQaNWqkPfpdU2/fPn36aAnLsLNcffrt+1/V2bz53FNaXX75uS71MYCpdCl9oX0JQiq1Hpd+aqnX+nHk8JenBiGVoW9dh7dJpTPJDUulNw3Bmd5FuyPuFO6PPPKI5sZo0P2BBx5o0KCB4DQEPnr06MoFNSVdQ007duzwD9O25qv7T70N1eiaG3Prrbfqafv27bOzs0eOHHn33XeH/WuXmZl5xhlnqLIPO11P65Y8QverTA+t1MOehh7p3vb2A3nqVKvj6ocKONPNQKXSmayV2xHnUxmpoY1y9Yza6XwqjdKOajCkMqq8sWzcT6U2Ynld26/lTuGuxE+YMCE0H1q0MfRpudupqaldunTRd6x63+KkIShtjxkzJuxg/R0ntEb3/qysaTNhhx08ePDLL78Mnf4edgBPPQEWceedgAACCCCAAAIIVFfAnVVl1PM//OEPmoau+0S//fZbPdXK62+88cYxRTSD5dlnn33hhRe01ONtt92m0XStMKOzhg0b5t+0OmDAgKeffnrRokVaLV7TWjQArz1e+a7fFt5///1vvvlGK89ceeWV2qk1Z4550YAfwCLuAX8D0H0TBDTHXTfc6+FPdjchKmJAAAEEEKhEwJ0RdxXWU6dOHTdu3MyZM71/hzRxRbX7wIEDK+m/Xho8ePCuXbt0rtal6dSpk+5t9e5V3bJliz/Kfs8992jVBf1369atTZo0UdWuq3jNfv/996rUNbFe+/Vrw6pVq7RR+RUD/qr+UrH1v4eEwCLuAX8n0P34Cujn5Ntvv60Y9HPPG4aIbzxcHQEEEEDgmALuFO5PPPGEBs414+X+++/3ut21a9ewyTMVcWhuTNnpMboh1T9eK7Xr25f08Pf4GxqG97fZqIrA/tyC7LxCHdkiI70qx3MMAggggAACCCCAgATcmSqjSSydO3cOTapuB9W8l9A9bJsg4M2TOa5+anpqkgnxEAMCCCCAAAIIIGCFgDuF+8knn7xu3bpQdE16Ofvss0P3sG2CQNY+b55MmgnBEAMCCCCAAAIIIGCLgAtTZe69915NidE9prfffntubq6mUOt2q5dfflnfefT73//elkwEJ86te3PV2ebMkwlOyukpAggggAACCERCwIXCXd9Iqq8y1SLr6enpun9USzfqm5i0tsxjjz12/fXXR0KJNiIpULoWJBPcI6lKWwgggAACCCDgvIALhbu/nvrQkocKd62nfuKJJzqfPEs7WLoWJFNlLE0gYSOAAAIIIIBAfARcKNwlp7Uafb96JQ//KRumCZQW7oy4m5YZ4gmWgBbL8r50QhvB6jm9RQABBKwVcOTn9RlnnBFau4emY8+ePaFP2Y67wLaSOe4s4h73RBBAwAX0PRX6yRlwBLqPAAII2CXgSOGuae4ZGRl20Qcz2oLCou37j9yc2jKTEfdgvgXoNQIIIIAAAgjUUMCRwl03oTKpvYZvgdietvPA4cKi4pSkhCYN6sb2ylwNAQR+JKBvTl2/fr12tW/fnm9O/RENTxBAAAFTBVwo3CuaJGOqeaDj8hZxb5aRlpj4v9sSAi1C5xGIk4AK9zfeeEMXb9euHYV7nJLAZRFAAIHqCbjwBUz+qjLV6zpHx0OARdzjoc41EUAAAQQQQMAFARdG3IuKilxIRTD6wCLuwcgzvUQAAQQQQACByAu4MOIeeRVajJpA6VqQLOIeNWIaRgABBBBAAAFHBSjcHU2sqd0qLdxZUsbUDBEXAggggAACCJgqQOFuamYcjcub484i7o6ml24hgAACCCCAQBQFKNyjiEvTZQWY417WhD0IIIAAAggggEBVBFy4ObUq/eQYEwSyDxfsO5SvSJpnMMfdhIQQQ6AFkpOTr7nmGhFoI9AQdB4BBBCwR4Cf1/bkyv5IvUXcG6UlN0xLsb839AABuwUSExPPOeccu/tA9AgggEDABJgqE7CEx7W7THCPKz8XRwABBBBAAAG7BRhxtzt/dkXPkjJ25Yto3RbQN2Bs2LBBfTz77LM1+u52Z+kdAggg4IYAP6zdyKMdvSgt3Jngbke+iNJtgYKCgldLHtpwu6f0DgEEEHBGgMLdmVRa0JGtew8pStaCtCBVhIgAAggggAAC5glQuJuXE3cjYi1Id3NLzxBAAAEEEEAg6gIU7lEn5gK+wLa9udpmxN0HYQMBBBBAAAEEEKi6AIV71a04slYCRUXF3nKQFO61cuRkBBBAAAEEEAiqAIV7UDMf837vzj6cX1icmFCnacO6Mb84F0QAAQQQQAABBKwXoHC3PoW2dMCbJ9NUX7+UxLvOlqQRJwIIIIAAAggYJMA67gYlw+1QSteCTHe7m/QOAVsEkpKSBg4cqGi1YUvMxIkAAggEXIDCPeBvgNh1n8I9dtZcCYEqCKhe79SpUxUO5BAEEEAAAVMEmLRgSiacj6N0EXe+fcn5VNNBBBBAA
AEEEIiKACPuUWGl0bICLOJe1oQ9CMRRoKioaPPmzQrgtNNOS0xkECeOqeDSCCCAQFUF+GFdVSmOq6XA0UXcM5jjXktITkcgMgIFBQUvlzy0EZkWaQUBBBBAIMoCFO5RBqb5UgHmuJdK8H8EEEAAAQQQQKAmAhTuNVHjnOoK5OYX/pCdp7NaZDLHvbp4HI8AAggggAACCBwRoHDnfRALgax9ubpMvdSkjPSUWFyPayCAAAIIIIAAAs4JULg7l1IjO+TPk0lISDAyQIJCAAEEEEAAAQRMF6BwNz1DbsRXuhYkd6a6kU96gQACCCCAAAJxEKBwjwN6AC9ZuhYkE9wDmHy6jAACCCCAAAKREWAd98g40krlAkenyrAWZOVMvIpADAX0zan9+/fXBbURw8tyKQQQQACBmgtQuNfcjjOrLnB0EfdMpspU3YwjEYiugOr1bt26RfcatI4AAgggEFEBpspElJPGKhDYtu+QXmlB4V6BD7sRQAABBBBAAIFjCjDifkwiDqitQHFxcemqMsxxry0m5yMQKYGioqItW7aotZNOOikxkUGcSLnSDgIIIBBFAX5YRxGXpj2B/+bk5+YXabtZBoU7bwoETBEoKCh4oeShDVNiIg4EEEAAgUoFKNwr5eHFSAh4w+1NGtatm8w9cJEApQ0EEEAAAQQQCKQAhXsg0x7bTrOIe2y9uRoCCCCAAAIIuClA4e5mXo3qFYu4G5UOgkEAAQQQQAABSwUo3C1NnE1hs4i7TdkiVgQQQAABBBAwVYDC3dTMOBQXi7g7lEy6ggACCCCAAAJxE6Bwjxt9cC7MIu7ByTU9RQABBBBAAIHoCbCOe/RsafmoAIu481ZAwEABfXNq7969FZg2DAyPkBBAAAEEygpQuJc1YU8kBfIKinYeOKwW+drUSLLSFgK1FlC93rNnz1o3QwMIIIAAArETYKpM7KyDeaUd+3OLi+ukJiceXz81mAL0GgEEEEAAAQQQiIgAI+4RYaSRCgW8RdxbZqYnJCRUeBAvIIBAzAWKioqysrJ02ebNmycmMogT8wRwQQQQQKD6Avywrr4ZZ1RHgAnu1dHiWARiJ1BQUPD7koc2YndVroQAAgggUAsBCvda4HFqFQRYxL0KSByCAAIIIIAAAggcW4DC/dhGHFEbga17c3U6d6bWxpBzEUAAAQQQQAABCVC48zaIrkDWvkO6gOa4R/cytI4AAggggAACCLguQOHueobj3T9vqkzzzLR4B8L1EUAAAQQQQAABuwUo3O3On+HRFxcXb/3vkRF3psoYninCQwABBBBAAAHzBSjczc+RxRHuzy3IzitUB1pkMFXG4jwSOgIIIIAAAgiYIMA67iZkwdkYvHkyx9VPTU/lO9WdzTIds1RA35x68cUXK3htWNoFwkYAAQSCJkDhHrSMx7S/LOIeU24uhkB1BFSvX3LJJdU5g2MRQAABBOIswFSZOCfA7cuziLvb+aV3CCCAAAIIIBBLAUbcY6kduGuxiHvgUk6H7RHQveO7du1SvE2aNElISLAncCJFAAEEgivAiHtwcx+DnrOIewyQuQQCNRPIz89/uuShjZq1wFkIIIAAAjEWoHA/Av7UU0+1bds2LS2te/fuq1evLjcHjz766Jlnnpment66des777wzN/fIF4KGPu6//36NWo0bNy50Z8C3WcQ94G8Auo8AAggggAACERSgcK+zePHi8ePHT5s2be3atR07duzXr9/OnTvDiF966aWJEyfqmA0bNsyfP1+nTJ48OfSYjz766JlnnunQoUPoTra37T3y6w2LuPNOQAABBBBAAAEEai9A4eHP9oEAACUdSURBVF5nzpw5I0aMuPnmm9u1azdv3rx69eo999xzYbIrVqzo2bPnkCFDNDDft2/fG264IXRg/uDBg0OHDn322WcbN24cdmKQnxYUFm3ff6Rwb5nJIu5BfiPQdwQQQAABBBCIjEDQb07Ny8tbs2bNpEmTPM7ExMTevXuvXLkyTPfCCy9cuHChivVu3bp99dVXb7311o033ugfc/vtt19xxRU68b777vN3hm0cLnl4O/fv368NzSv1ppaG/jfsLKufZu3LLSwqTklKyKyb6PXR6u5UJXhXU1mVvjt2TBBS6X8qteHwzalBSKVjn76KukMqK5Kxbn9YKr2n1vUiXgEHvXDfvXt3YWFh06ZN/QRoe+PGjf5Tb0Nj7TqyV69eWoehoKBg1KhR/lSZRYsWaY6NpsqEnRL2dPbs2dOnTw/duXTpUo3u+3uWLVvmb7ux8dWRX0+SGyUXvfPO2270qIq9cC+VVey4e4e5nUr96PNS9u677zr/HUxup9K9j14lPSKVleDY9ZKfypycHLsij2+0QS/cq6i/fPnyWbNmzZ07V3evbt68eezYsTNmzJgyZcp3332nbb35dGNr5U1pUF8z6b1jNOKuO1w15aZRo0bao9811UKfPn1SUlIqb8SuV//yn6w6n60/tflxl19+vl2R1zhaV1NZYxB7TwxCKvX3xvXr1ytHurEnNTXV3mRVHnkQUlm5gDOvkkpXU+lNQ3Cmd9HuSNAL9xNOOEFDTTt27PChtd2sWTP/qbehGl1zY2699VY9bd++fXZ29siRI++++25Ns9GdrOedd553mEawPvjggyeffFLzYsJGsOqWPEKbVZkeWqmHPQ090tLtHQeOrDHXqnG90G5a2pdqhe1eKqvVfZcOdjuVmhnYo0cP5UvjDmE/r1xKotcXt1PpXr4q6RGprATHrpf8VGrDrsjjG23QC3eNM3Xp0uW9994bNGiQMlFUVKTtMWPGhGVFf8fRP3L+Tu8fOU2bufTSS70hK+8l3eF61lln/eY3v3H+X0GfopINbxF3lpSphIiXEIijgH5M6e9+cQyASyOAAAIIVFcg6IW7vDSD5aabburatatuPNVi7RpNV/2t/cOGDWvZsqXmpmt7wIABWnymc+fO3lQZDcBrj/7Za9iw4bnnnuuj169f//jjjw/d478UwA0WcQ9g0ukyAggggAACCERPgMK9zuDBg/W931OnTt2+fXunTp3eeecd717VLVu2+KPs99xzj1Zd0H+3bt2qrwdX1T5z5szoZcWNlreyiLsbiaQXjgrob4b79u1T5zIyMhxeVcbR7NEtBBAIqACF+5HEa25M2ekxuiHVf1MkJyfr25f08PeUuxF6SrkHBGqnN+LOIu6BSjqdtUhAt/o99thjCli3zjt8c6pFGSFUBBBA4JgC/5u3fcxDOQCBqgscPFyw79CRm1ObZxxjvZ2qt8mRCCCAAAIIIIBAkAUo3IOc/Sj2PWvvIbXeKC25YRp3i0fRmaYRQAABBBBAIDgCFO7ByXVMe7q1pHBnSZmYonMxBBBAAAEEEHBagMLd6fTGr3PbSu5MZYJ7/DLAlRFAAAEEEEDANQEKd9cyakh/vEXcm2cywd2QhBAGAggggAACCFgvQOFufQrN7ABTZczMC1EhgAACCCCAgL0CLAdpb+6Mjpy1II1OD8EhUKeOvqdCXzwnCf8LK1BBAAEEEDBcgMLd8ATZGp43x52bU23NH3EHQEBf
T3HFFVcEoKN0EQEEEHBHgKky7uTSnJ4UFRV7c9wp3M1JCpEggAACCCCAgO0CjLjbnkET49998HB+YXFiQp2mDeuaGB8xIYBAnTrFxcU5OTmSqFevXkJCAiQIIIAAAuYLMOJufo7si9C7M7WZvn4piTeYfekj4oAI5OfnP1Ty0EZAukw3EUAAAdsFqKtsz6CJ8Wfty1VYzJMxMTfEhAACCCCAAALWClC4W5s6gwP3lpRpnplucIyEhgACCCCAAAIIWCZA4W5ZwqwIt3QRd759yYp0ESQCCCCAAAII2CFA4W5HnuyKkkXc7coX0SKAAAIIIICAFQIU7lakybIgjy7insFUGcsSR7gIIIAAAgggYLIAhbvJ2bE1Nm/EnZtTbc0fcSOAAAIIIICAkQKs425kWmwOKje/8IfsPPWgJTen2pxHYndeIDExsWPHjuqmNpzvLB1EAAEE3BCgcHcjjwb1whtur5+a1Cidd5dBeSEUBMIEkpOTBw0aFLaTpwgggAACJgsw0GJydqyMzV/Ene9itDJ/BI0AAggggAACpgowJmpqZqyNy1sLkkXcrU0ggQdFoLi42PvO1JSUFH7NDkrW6ScCCFguwIi75Qk0L/zStSBZxN283BARAiECqtpnlzy88j3kFTYRQAABBAwVoHA3NDH2hnV0SRnWgrQ3hUSOAAIIIIAAAkYKULgbmRabgzq6iDtLyticRGJHAAEEEEAAAQMFKNwNTIrdIbGIu935I3oEEEAAAQQQMFWAwt3UzNgZl253825OZRF3OxNI1AgggAACCCBgrgCFu7m5sTGyPdl5hwuKEhLqNM2oa2P8xIwAAggggAACCBgrQOFubGqsDMxbxL1Jg7p1k5Os7ABBI4AAAggggAACpgqwjrupmbEzLhZxtzNvRB1EgcTExHbt2qnn2ghi/+kzAgggYKEAhbuFSTM4ZBZxNzg5hIbAjwSSk5OvvfbaH+3iCQIIIICA2QIMtJidH9uiYxF32zJGvAgggAACCCBgjQCFuzWpsiJQFnG3Ik0EiQACCCCAAAI2CjBVxsasmRuzN8e9Bd++ZG6KiAyBowJ5eXmzZ8/Wk0mTJqWmpuKCAAIIIGC+ACPu5ufIpghL57in2xQ0sSKAAAIIIIAAAjYIULjbkCVLYswrKNp18LCCbZGZZknIhIkAAggggAACCFgjQOFuTarMD3TH/tzi4jp1kxOPq8+f3c1PFxEigAACCCCAgGUCFO6WJczkcP0J7gn66lQeCCCAAAIIIIAAAhEVoHCPKGewGzu6FiTzZIL9NqD3CCCAAAIIIBAlAQr3KMEGsVkWcQ9i1ukzAggggAACCMRKgOUgYyUdgOts3ZurXrIWZABSTRddEEhMTDz99NPVE2240B/6gAACCARAgMI9AEmOVRdZCzJW0lwHgQgIJCcnDxkyJAIN0QQCCCCAQKwEGGiJlXQArlM6x51F3AOQbLqIAAIIIIAAAjEXoHCPObmjFywuLi4t3FnE3dEc0y0EEEAAAQQQiKsAU2Xiyu/QxffnFmTnFapDzHF3KKt0xWWBvLy8hx56SD2cMGFCairfveByrukbAgg4I0Dh7kwq49wRb7hdX72UlpIU51C4PAIIVE0gPz+/agdyFAIIIICAEQJMlTEiDQ4EwTwZB5JIFxBAAAEEEEDAZAEKd5OzY1NsRwv3DO5MtSlrxIoAAggggAACFglQuFuULKNDZRF3o9NDcAgggAACCCBgvwCFu/05NKMH3oh7y0xG3M3IB1EggAACCCCAgHMCFO7OpTROHSqd407hHqcEcFkEEEAAAQQQcF2AVWVcz3Cs+ldauLOIe6zEuQ4CtRNISEho06aN2tBG7VribAQQQACBGAlQuMcI2u3LFBQW7ThwWH1kEXe3E03vXBJISUkZPny4Sz2iLwgggIDzAkyVcT7FsejgzgOHC4uKU5ISmjSoG4vrcQ0EEEAAAQQQQCB4AhTuwct5FHrszZNplpGWmMjf3KPgS5MIIIAAAggggECdOkyV4V0QAYGtew+plRYs4h4BS5pAIEYCeXl5jz32mC42duzY1NTUGF2VyyCAAAII1EKAwr0WeJxaKrBtb642WQuy1IP/I2CHQE5Ojh2BEiUCCCCAQIkAU2V4I0RAoHRJGdaCjAAmTSCAAAIIIIAAAuUKULiXy8LO6glQuFfPi6MRQAABBBBAAIHqC1C4V9+MM8oIHJ3jnski7mVo2IEAAggggAACCERIgMI9QpDBbiZr35E57iziHux3Ab1HAAEEEEAAgegKULhH1zcIrR88XLDvUL562jyDEfcgJJw+IoAAAggggEB8BFhVJj7uLl01q2QtyEZpyQ3TUlzqF31BwG2BhISEFi1aqI/acLun9A4BBBBwRoDC3ZlUxq0jpRPcWVImbingwgjUQCAlJWXEiBE1OJFTEEAAAQTiJcBUmXjJu3NdFnF3J5f0BAEEEEAAAQQMFqBwP5Kcp556qm3btmlpad27d1+9enW5+Xr00UfPPPPM9PT01q1b33nnnbm5R27H1OPpp5/u0KFDo5JHjx493n77bW9/cP7LWpDByTU9RQABBBBAAIE4ClC411m8ePH48eOnTZu2du3ajh079uvXb+fOnWEpeemllyZOnKhjNmzYMH/+fJ0yefJk75hWrVrdf//9a9as+fjjj3/2s58NHDjws88+Czvd7acU7m7nl965KpCfn6/xCD204Wof6RcCCCDgmACFe505c+ZooufNN9/crl27efPm1atX77nnngtL84oVK3r27DlkyBANzPft2/eGG27wB+YHDBhw+eWXn3766WecccbMmTMbNGiwatWqsNPdfrpt3yF1sAWLuLudZnrnnEBxcfG+koc2nOscHUIAAQTcFAh64Z6Xl6fB8t69e3vpTUxM1PbKlSvDsn3hhRfqMK9Y/+qrr9566y0V62HHFBYWLlq0KDs7WxNmwl5y+6k3x51F3N3OMr1DAAEEEEAAgbgLBH1Vmd27d6vgbtq0qZ8JbW/cuNF/6m1orF1H9urVS0NTBQUFo0aN8qfK6ID169erWNesdw23v/baaxq5DztdTw+XPLz9+/fv14b+PO39hTr0v2VPNHxPUVFxVsmI+4n1k72OGB5wVMOzOpVRlbGu8SCk0v/AasPhFSGDkErrPl81C5hU1szNwLPCUuk9NTBOM0MKeuFexawsX7581qxZc+fO1d2rmzdvHjt27IwZM6ZMmeKdrptW161bp785v/rqqzfddNP7779ftnafPXv29OnTQy+3dOlSTcvx9yxbtszftmhjX16d/MLkhDrFa/7593UsBl2SOUtTadG7Lmahup1KjVl4ku+++25SUlLMVONyIbdTGRfSeF2UVMZLPuLX9VOZk5MT8cYdbjAh4LMbNVVG1bMK7kGDBnlpVuW9d+/eN954IzTrF1100QUXXPDggw96OxcuXDhy5MiDBw9qak3oYdrWTJtTTz31mWeeCdsfNuKupWk0hK+laHSYftfU27dPnz5aVjnsLPOfrvtu77X/d7W+M/WDCT8xP9poR2h1KqONY1f7QUilfvo99NB
DysuECRNSU1PtSlDVow1CKquuYfWRpNLq9IUGH5ZKTUM44YQTNPrpFUWhR7JdViDoI+7656pLly7vvfeeV7gXFRVpe8yYMWFS+nUwtEb3RqfK/Z1HLahGDztdT+uWPEL3q0wPrdTDnoYeafL2zoMFCq9lZnpoX0wOOAaxWZrKGMhYdwm3U+n/BHO7m967Lgh9tO7zVbOASWXN3Aw8y0+lNgwMz9iQgl64KzFaC1Kj7F27du3WrZtWRtPdpVphRvuHDRvWsmVLTXHRtpaO0eIznTt39qbKaJKM9njl+6RJk/r373/SSScdOHBAq0ZqUo3+7mxsviMeGGtBRpyUBhGIjYDmtTdp0kTXcniCe2wkuQoCCCAQMwEK9zqDBw/etWvX1KlTt2/f3qlTp3feece7V3XLli3+KPs999yjf9v0361bt+qfOlXtWvnRS5IWfVeJn5WVlZGRoW9iUtWuSS8xy1/cL7R1r7cWZHrcIyEABBColoBGuUaPHl2tUzgYAQQQQCC+AhTuR/w1N6bs9BiNnfu5SU5O1rcv6eHv8Tf0fUz+dgA3vCVlWrKIewBzT5cRQAABBBBAILYC4fdWxvbqXM16AW8R9+YZjLhbn0o6gAACCCCAAAKGCzDibniCTA+POe6mZ4j4EKhAQAs7PPvss3pRXx3NzWEVILEbAQQQMEuAwt2sfNgVTW5+4Q/ZeYpZq8rYFTnRIoCAVpXR7T1y8JeXwQQBBBBAwHABpsoYniCjw/OG2+unJjVK5zdAozNFcAgggAACCCDggACFuwNJjFsXvAnuLTLTWU4ubjngwggggAACCCAQGAEK98CkOgodZYJ7FFBpEgEEEEAAAQQQKF+Awr18F/ZWRYBF3KuixDEIIIAAAggggEBEBCjcI8IY0EZYxD2giafbCCCAAAIIIBAPAe4pjIe6K9dkEXdXMkk/giigW1P0fc/qOfeoBDH99BkBBOwUoHC3M29mRM0cdzPyQBQI1ERAa7ePGzeuJmdyDgIIIIBAnASYKhMnePsvq7WfvTnuLOJufzLpAQIIIIAAAghYIEDhbkGSzAxxT3be4YKihIQ6TTPqmhkhUSGAAAIIIIAAAi4JMFXGpWzGtC/eBPcmDerWTU6K6YW5GAIIREIgPz9/wYIFamn48OGaNhOJJmkDAQQQQCC6AhTu0fV1uHXWgnQ4uXQtCAKa7bZt2zb1VBtB6C99RAABBBwQYKqMA0mMTxe8O1OZ4B4ffa6KAAIIIIAAAsEToHAPXs4j1GNvEfcWmWkRao9mEEAAAQQQQAABBCoTYKpMZTpWv1ZYVLz66z07D+Se2DCt28nHJSUmRLA7anz99/vU4OH8Im1HtvEIxklTCCCAAAIIIICAMwIU7s6k8kcdeefTrOl/+TxrX663t3lG2rQB7S47t/mPDqrpk9DG/9+qb5dt2BHBxmsaFOchgAACCCCAAAKOCzBVxsEEq7C+beFav2pXD7fvy9Ue7a99b6PaeO3DowUEEEAAAQQQQMBVAUbcXcusJq5orD1skQg91USZ3/75856nnVCbaS1qfNqfPyu3cV20T7tmtWnctUzQHwSMF6hXr57xMRIgAggggMD/BCjc/2fhxpbmtYeOtfudUrW9fX9u+98u9fdEcEON66K6dI9Tj49gszSFAALRE0hNTb3rrrui1z4tI4AAAghEXICpMhEnjXODuhs1XhHE8dLx6jLXRQABBBBAAAEEYibAiHvMqGN0Ia0hU8mVFtx8vlaYqeSAyl/SmPrw5z+q6JjKL13RWexHAAEEEEAAAQQQqIoAhXtVlGw6RnW51pDR3ahhM9E1x71ZRtpFpzepzTR0nV5J47X5lcAmYmJFwAmB/Pz8F198UV0ZOnRoSkqKE32iEwgggIDjAkyVcS3Bqsu1OKN6Fbpsu7et/bWp2tVmVBt3LRP0BwGzBYqLi78teWjD7EiJDgEEEEDgqACFu4NvBa3X/vT/OU/j637ftK09EVnHPaqN+wGzgQACCCCAAAIIIBAmwFSZMBBHnqq81uKMUfrm1Kg27kgC6AYCCCCAAAIIIBBpAQr3SIsa056mtURvccaoNm4MIYEggAACCCCAAAIGCTBVxqBkEAoCCCCAAAIIIIAAAhUJULhXJMN+BBBAAAEEEEAAAQQMEmCqjEHJIBQEEEAglgKsAhlLba6FAAII1F6Awr32hrSAAAII2CeQmpo6efJk++ImYgQQQCDAAkyVCXDy6ToCCCCAAAIIIICAPQIU7vbkikgRQAABBBBAAAEEAizAVJkAJ5+uI4BAgAUKCgqWLFkigOuuuy45mX8LAvxWoOsIIGCPAD+s7ckVkSKAAAKREygqKtq0aZPa00bkWqUlBBBAAIEoCjBVJoq4NI0AAggggAACCCCAQKQEKNwjJUk7CCCAAAIIIIAAAghEUYDCPYq4NI0AAggggAACCCCAQKQEKNwjJUk7CCCAAAIIIIAAAghEUYDCPYq4NI0AAggggAACCCCAQKQEWFUmUpLVaKe4uFhH79+/3zsnPz8/JydHT/n68WogGnkoqTQyLTUJKgipzMvLy83NlY5++OhbVGvCZMM5QUilDXmIQIykMgKIZjQRlkqvHPJKIzMCNDqKBKRin5/vv/++devWsb8uV0QAAQQQQAABBAwU+O6771q1amVgYKaFROEeh4xo1eRt27Y1bNgwISFBl9fvmqrj9ZZt1KhRHKLhkpETIJWRs4xzS6QyzgmI3OVJZeQs49wSqYxzAiJ3+bBUagT5wIEDLVq0SExk/vaxlZkqc2yjiB+ht2bZXytVtVO4R5w6Lg2SyriwR+OipDIaqnFpk1TGhT0aFyWV0VCNS5uhqczIyIhLDDZelF9ubMwaMSOAAAIIIIAAAggEToDCPXApp8MIIIAAAggggAACNgok/fa3v7UxbsdiTkpKuuSSS5KTmblkfWJJpfUpLO0AqSyVsP7/pNL6FJZ2gFSWSlj/f1JZ4xRyc2qN6TgRAQQQQAABBBBAAIHYCTBVJnbWXAkBBBBAAAEEEEAAgRoLULjXmI4TEUAAAQQQQAABBBCInQCFe+ysuRICCCCAAAIIIIAAAjUWoHCvMR0nIoAAAggggAACCCAQOwEK99hZl3ulp556qm3btmlpad27d1+9enW5x7DTcAEtzaQvwfUfZ511luEBE16YwAcffDBgwAB9b5+S+Prrr/uv6vv8pk6d2rx58/T09N69e2/atMl/iQ0zBSpK5fDhw/1PqDYuu+wyM+MnKk9g9uzZ559/vr5f/MQTTxw0aNAXX3zhy+Tm5t5+++3HH398gwYNrr766h07dvgvsWGmQCXZ1Hp6oR/MUaNGmdkFo6KicI9nOhYvXjx+/Php06atXbu2Y8eO/fr127lzZzwD4to1FTjnnHOySh8ffvhhTZvhvPgIZGdn6wOo36LDLv/AAw88/vjj8+bN+9e//lW/fn19QlU0hB3DU6MEKkqlglSxXvoZzXr55ZeNCptgwgTef/99VeerVq1atmxZfn5+3759lVnvmDvvvPMvf/nLK6+8omO2bdt21VVXhZ3LU9MEKsmmQh
0xYoT/wdSPXNOCNzEeDSnxiJdAt27d9LPJu3phYaEG/PSLabyC4bo1FtCvXir7anw6J5ojoJ/Rr732mhdPUVFRs2bNHnzwQe/p3r1769atq4LPnGiJpBKB0FTqsJtuumngwIGVHM9Lxgp441kq/hShPoYpKSmq2r1oN2zYoESvXLnS2OAJLEwgNJt66eKLLx47dmzYMTytXIAR97j9NpWXl7dmzRr9/d2LIDExUdv6ARS3gLhwLQQ0iUK/d51yyilDhw7dsmVLLVriVFMEvv766+3bt/uf0IyMDM1n4xNqSnqqH8fy5cs17+LMM8+87bbbfvjhh+o3wBnxEdi3b58ufNxxx+m/+kdTA/D+p1LzEk866SQ+lfFJTI2uGppNr4EXX3zxhBNOOPfccydNmpSTk1OjVoN1El/VGbd87969W6PsTZs29SPQ9saNG/2nbNgioHpuwYIFKgj0977p06dfdNFFn376qWZn2hI/cZYroKpd+8M+od7Oco9np8kCmiejORUnn3zyl19+OXny5P79+6va03c3mhwzsUlAf/gaN25cz549VdjpqT6AqampmZmZPo4+oXwqfQ3DN8KyqWiHDBnSpk0bDXv95z//+c1vfqObGf70pz8Z3ou4h0fhHvcUEID1AioCvD506NBBRbx+DC1ZsuSWW26xvmN0AAFXBK6//nqvK+3bt9fn9NRTT9UA/KWXXupK/5zth2aTahyEG4fcSHDZbI4cOdL/YGoZAH0k9au1Pp5u9DdKvWCqTJRgj92s/jak8Z7QO+K1rTm1xz6TIwwW0FDQGWecsXnzZoNjJLQqCXgfRj6hVcKy6iBNadOPXz6k5idtzJgxb7755t///vdWrVp50epTqVmmmunuB8+/mz6F4RtlsxkWsIa9tIcPZhhL2acU7mVNYrRHf+/r0qXLe++9511Pf0LSdo8ePWJ0eS4THYGDBw9qwEAjB9FpnlZjJ6BpFaoS/E/o/v37tbYMn9DYJSBqV/r+++81x50PadSAI9Cwbs5Tnac7xf/2t7/pk+i3qH80dXOq/6nUzArdU8Sn0vcxc6OibIZFu27dOu3hgxnGUvZpkpagLruXPbERaNSo0ZQpU1q3bq3VKrShd+38+fO1Nm1srs5VIiUwYcIEZVCtff7551qGVnfNawFBrR4YqfZpJ9oC+nVLudNM2WeeeUajPlq1XaN6+uOJ7kKZNWtWu3bt9PSOO+7QjVNPPPFEcjIzDKOdkJq3X24q9bfNu+++Wz9vCwoKdHejprHpx+zDDz9MKmsOHeUzNadC9yy++uqrmv2snOqhJKpk13eeaAnIJ598slOnTnv27PnVr36lf0C1rleUw6H5WglUlE0NcmkRXn0Y9QNW38Cgfz01k23ixIm1ulgQTq580RlejbaA6gDdFK/Rdy0NqTVro3052o+GwODBgzVIoCS2bNlS2/pLXzSuQpvRE9Df4sN+2mv1QF1OfwfTb9S6+02/mGnypYb3ohcDLUdEoNxU6jcuLQTepEkTVX66BUXrRuuXtIhcjkaiJBD2edTT559/3rvWoUOHRo8e3bhx43r16l155ZVaEiBKMdBspAQqyqb+WvKTn/xE6wXpB+xpp5121113ac2ZSF3U4XYS1LeypuxBAAEEEEAAAQQQQAABowSY425UOggGAQQQQAABBBBAAIHyBSjcy3dhLwIIIIAAAggggAACRglQuBuVDoJBAAEEEEAAAQQQQKB8AQr38l3YiwACCCCAAAIIIICAUQIU7kalg2AQQAABBBBAAAEEEChfgMK9fBf2IoAAAggggAACCCBglACFu1HpIBgEEEAAAQQQQAABBMoXoHAv34W9CCCAAAIIIIAAAggYJUDhblQ6CAYBBBCoocCuXbtuu+02fROzvoawWbNm/fr1++c//6m2EhISXn/99Ro2ymkIIIAAAiYJJJsUDLEggAACCNRQ4Oqrr87Ly3vhhRdOOeWUHTt2vPfeez/88EMN2+I0BBBAAAEjBRhxNzItBIUAAghUR2Dv3r3/+Mc/fve73/30pz9t06ZNt27dJk2a9Itf/KJt27Zq5sorr9S4u7etp2+88cZ5552XlpamEn/69OkFBQXepXTM008/3b9///T0dL306quvevv1+8CYMWOaN2+uU9T47Nmzvf38FwEEEEAgxgIU7jEG53IIIIBA5AUalDw0Jebw4cOhrX/00Ud6+vzzz2dlZXnbqu+HDRs2duzYzz///JlnnlmwYMHMmTP9U6ZMmaKR+08++WTo0KHXX3/9hg0b9NLjjz/+5z//ecmSJV988cWLL77o/wLgn8UGAggggEBsBBKKi4tjcyWuggACCCAQPYE//vGPI0aMOHTokEbTL774YpXdHTp00OU0jv7aa68NGjTIu3Tv3r0vvfRSjcd7TxcuXPjrX/9627Zt3pGjRo3SoLv30gUXXKCm5s6de8cdd3z22Wd//etf1ZT3Ev9FAAEEEIiLACPucWHnoggggECEBTRSrvpbQ+OXXXbZ8uXLVXNrNL3sNTSafu+993oj9Pqvan0Nxufk5HhH9ujRwz9F296I+/Dhw9etW3fmmWeqgl+6dKl/ABsIIIAAAjEWoHCPMTiXQwABBKIloDnoffr00XSXFStWqNqeNm1a2SsdPHhQ89pViHuP9evXb9q0SSeWPdLfo98Bvv766xkzZmg4/7rrrrvmmmv8l9hAAAEEEIilAIV7LLW5FgIIIBAjgXbt2mVnZ+tiKSkphYWF/lVVhWuq+mk/fiQmHv23YNWqVf6R2j777LO9p40aNRo8ePCzzz67ePFizcnZs2ePfxgbCCCAAAIxE2A5yJhRcyEEEEAgWgJa+fHaa6/95S9/qXntDRs2/Pjjjx944IGBAwfqerqXVEtD9uzZU+u7N27ceOrUqT//+c+13LsGzlWva+bMp59+et9993mRvfLKK127du3Vq5duQl29evX8+fO1f86cOVpSpnPnzjpeB2iR+MzMzGj1hHYRQAABBCoWoHCv2IZXEEAAAUsENFu9e/fujzzyyJdffpmfn9+6dWtNXp88ebLCf/jhh8ePH6/B8pYtW37zzTf6YqY333xT09y1dqQG488666xbb73V76Vm0SxatGj06NGq1F9++WUN2+sl/SagXwM0oyYpKen8889/6623/BF6/0Q2EEAAAQRiIMCqMjFA5hIIIICABQJh689YEDEhIoAAAgETYI57wBJOdxFAAAEEEEAAAQTsFKBwtzNvRI0AAggggAACCCAQMAGmygQs4XQXAQQQQAABBBBAwE4BRtztzBtRI4AAAggggAACCARMgMI9YAmnuwgggAACCCCAAAJ2ClC425k3okYAAQQQQAABBBAImACFe8ASTncRQAABBBBAAAEE7BSgcLczb0SNAAIIIIAAAgggEDABCveAJZzuIoAAAggggAACCNgpQOFuZ96IGgEEEEAAAQQQQCBgAhTuAUs43UUAAQQQQAABBBCwU4DC3c68ETUCCCCAAAIIIIBAwAQo3AOWcLqLAAIIIIAAAgggYKcAhbudeSNqBBBAAAEEEEAAgYAJU
LgHLOF0FwEEEEAAAQQQQMBOAQp3O/NG1AgggAACCCCAAAIBE6BwD1jC6S4CCCCAAAIIIICAnQIU7nbmjagRQAABBBBAAAEEAiZA4R6whNNdBBBAAAEEEEAAATsFKNztzBtRI4AAAggggAACCARMgMI9YAmnuwgggAACCCCAAAJ2ClC425k3okYAAQQQQAABBBAImACFe8ASTncRQAABBBBAAAEE7BSgcLczb0SNAAIIIIAAAgggEDABCveAJZzuIoAAAggggAACCNgp8P8BhKg2I1BqsPcAAAAASUVORK5CYII=)\n" + ], + "metadata": { + "id": "xHF95Kr4CzGq" + } + }, + { + "cell_type": "markdown", + "source": [ + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq` from the extra packages.\n", + "\n", + " ```bash\n", + " pip install adalflow[openai,groq]\n", + " ```\n", + "2. Setup `openai` and `groq` API key in the environment variables\n", + "\n", + "You can choose to use different client. You can import the model client you prefer. We support `Anthropic`, `Cohere`, `Google`, `GROQ`, `OpenAI`, `Transformer` and more in development. We will use OpenAI here as an example.Please refer to our [full installation guide](https://adalflow.sylph.ai/get_started/installation.html)" + ], + "metadata": { + "id": "Kof5M6DRaKhh" + } + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "id": "tAp3eDjOCma1" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai] # also install the package for the model client you'll use\n", + "!pip install datasets\n", + "clear_output()" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Set Environment Variables\n", + "\n", + "Run the following code and pass your api key.\n", + "\n", + "Note: for normal `.py` projects, follow our [official installation guide](https://lightrag.sylph.ai/get_started/installation.html).\n", + "\n", + "*Go to [OpenAI](https://platform.openai.com/docs/introduction) to get API keys if you don't already have.*" + ], + "metadata": { + "id": "KapUyHMM07pJ" + } + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "ONfzF9Puzdd_", + "outputId": "e5c3cfc5-69cb-448a-c248-a8cebda5ba71" + }, + "execution_count": 43, + "outputs": [ { - "cell_type": "markdown", - "source": [ - "# Issues and feedback\n", - "\n", - "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", - "\n", - "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." 
- ], - "metadata": { - "id": "AmkbyxmuruUu" - } + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "API keys have been set.\n" + ] } - ] + ] + }, + { + "cell_type": "code", + "source": [ + "from dataclasses import dataclass, field\n", + "from typing import List, Dict, Union, Optional, Tuple, Any, Callable\n", + "from datasets import load_dataset\n", + "from adalflow.components.model_client import OpenAIClient\n", + "import adalflow as adal\n", + "from adalflow.core.component import Component\n", + "from adalflow.datasets.types import TrecData\n", + "from adalflow.eval.answer_match_acc import AnswerMatchAcc\n", + "\n", + "\n", + "_COARSE_LABELS = [\"ABBR\", \"DESC\", \"ENTY\", \"HUM\", \"LOC\", \"NUM\"]\n", + "\n", + "_COARSE_LABELS_DESC = [\n", + " \"Abbreviation: Questions about abbreviations and their meanings\",\n", + " \"Description: Questions seeking descriptions of people, things, or concepts\",\n", + " \"Entity: Questions about entities (e.g., animals, colors, inventions)\",\n", + " \"Human: Questions about people or organizations\",\n", + " \"Location: Questions about places, cities, countries\",\n", + " \"Numeric: Questions seeking numeric answers (e.g., dates, amounts, distances)\",\n", + "]\n", + "\n", + "\n", + "template = r\"\"\"\n", + " {{system_prompt}}\n", + " {% if output_format_str is not none %}\n", + " {{output_format_str}}\n", + " {% endif %}\n", + " {% if few_shot_demos is not none %}\n", + " Here are some examples:\n", + " {{few_shot_demos}}\n", + " {% endif %}\n", + " \n", + " \n", + " {{input_str}}\n", + " \n", + " \"\"\"\n", + "\n", + "task_desc_template = r\"\"\"You are a classifier. Given a question, you need to classify it into one of the following classes:\n", + " Format: class_index. class_name, class_description\n", + " {% if classes %}\n", + " {% for class in classes %}\n", + " {{loop.index-1}}. 
{{class.label}}, {{class.desc}}\n", + " {% endfor %}\n", + " {% endif %}\n", + " - Do not try to answer the question:\n", + " \"\"\"\n", + "\n", + "\n", + "@dataclass\n", + "class TRECExtendedData(TrecData):\n", + " rationale: str = field(\n", + " metadata={\n", + " \"desc\": \"Your step-by-step reasoning to classify the question to class_name\"\n", + " },\n", + " default=None,\n", + " )\n", + " __input_fields__ = [\"question\"]\n", + " __output_fields__ = [\n", + " \"rationale\",\n", + " \"class_name\",\n", + " ] # it is important to have the rationale before the class_name" + ], + "metadata": { + "id": "ZZIEtZYHNVjo" + }, + "execution_count": 49, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "class TRECClassifierStructuredOutput(adal.Component):\n", + "\n", + " def __init__(self, model_client: adal.ModelClient, model_kwargs: Dict):\n", + " super().__init__()\n", + "\n", + " label_desc = [\n", + " {\"label\": label, \"desc\": desc}\n", + " for label, desc in zip(_COARSE_LABELS, _COARSE_LABELS_DESC)\n", + " ]\n", + "\n", + " task_desc_str = adal.Prompt(\n", + " template=task_desc_template, prompt_kwargs={\"classes\": label_desc}\n", + " )()\n", + "\n", + " self.data_class = TRECExtendedData\n", + " self.data_class.set_task_desc(task_desc_str)\n", + "\n", + " self.parser = adal.DataClassParser(\n", + " data_class=self.data_class, return_data_class=True, format_type=\"yaml\"\n", + " )\n", + "\n", + " prompt_kwargs = {\n", + " \"system_prompt\": adal.Parameter(\n", + " data=self.parser.get_task_desc_str(),\n", + " role_desc=\"Task description\",\n", + " requires_opt=True,\n", + " param_type=adal.ParameterType.PROMPT,\n", + " ),\n", + " \"output_format_str\": adal.Parameter(\n", + " data=self.parser.get_output_format_str(),\n", + " role_desc=\"Output format requirements\",\n", + " requires_opt=False,\n", + " param_type=adal.ParameterType.PROMPT,\n", + " ),\n", + " \"few_shot_demos\": adal.Parameter(\n", + " data=None,\n", + " requires_opt=True,\n", + " role_desc=\"Few shot examples to help the model\",\n", + " param_type=adal.ParameterType.DEMOS,\n", + " ),\n", + " }\n", + "\n", + " self.llm = adal.Generator(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " prompt_kwargs=prompt_kwargs,\n", + " template=template,\n", + " output_processors=self.parser,\n", + " use_cache=True,\n", + " )\n", + "\n", + " def _prepare_input(self, question: str):\n", + " input_data = self.data_class(question=question)\n", + " input_str = self.parser.get_input_str(input_data)\n", + " prompt_kwargs = {\n", + " \"input_str\": adal.Parameter(\n", + " data=input_str, requires_opt=False, role_desc=\"input to the LLM\"\n", + " )\n", + " }\n", + " return prompt_kwargs\n", + "\n", + " def call(\n", + " self, question: str, id: Optional[str] = None\n", + " ) -> Union[adal.GeneratorOutput, adal.Parameter]:\n", + " prompt_kwargs = self._prepare_input(question)\n", + " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", + " return output" + ], + "metadata": { + "id": "3Q3H9XC4Ncfi" + }, + "execution_count": 50, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "class TrecClassifierAdal(adal.AdalComponent):\n", + " def __init__(\n", + " self,\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + " teacher_model_config: Dict,\n", + " backward_engine_model_config: Dict,\n", + " text_optimizer_model_config: Dict,\n", + " ):\n", + " task = TRECClassifierStructuredOutput(model_client, model_kwargs)\n", + " eval_fn = 
AnswerMatchAcc(type=\"exact_match\").compute_single_item\n", + " loss_fn = adal.EvalFnToTextLoss(\n", + " eval_fn=eval_fn,\n", + " eval_fn_desc=\"exact_match: 1 if str(y) == str(y_gt) else 0\",\n", + " )\n", + " super().__init__(\n", + " task=task,\n", + " eval_fn=eval_fn,\n", + " loss_fn=loss_fn,\n", + " backward_engine_model_config=backward_engine_model_config,\n", + " text_optimizer_model_config=text_optimizer_model_config,\n", + " teacher_model_config=teacher_model_config,\n", + " )\n", + "\n", + " def prepare_task(self, sample: TRECExtendedData):\n", + " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", + "\n", + " def prepare_eval(\n", + " self, sample: TRECExtendedData, y_pred: adal.GeneratorOutput\n", + " ) -> float:\n", + " y_label = -1\n", + " if y_pred and y_pred.data is not None and y_pred.data.class_name is not None:\n", + " y_label = y_pred.data.class_name\n", + " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.class_name}\n", + "\n", + " def prepare_loss(\n", + " self, sample: TRECExtendedData, y_pred: adal.Parameter, *args, **kwargs\n", + " ) -> Tuple[Callable[..., Any], Dict]:\n", + " full_response = y_pred.full_response\n", + " y_label = -1\n", + " if (\n", + " full_response\n", + " and full_response.data is not None\n", + " and full_response.data.class_name is not None\n", + " ):\n", + " y_label = full_response.data.class_name\n", + "\n", + " y_pred.eval_input = y_label\n", + " y_gt = adal.Parameter(\n", + " name=\"y_gt\",\n", + " data=sample.class_name,\n", + " eval_input=sample.class_name,\n", + " requires_opt=False,\n", + " )\n", + " return self.loss_fn, {\"kwargs\": {\"y\": y_pred, \"y_gt\": y_gt}}" + ], + "metadata": { + "id": "HpkQYsh2NevT" + }, + "execution_count": 51, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def train(\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + " train_batch_size=4,\n", + " raw_shots: int = 0,\n", + " bootstrap_shots: int = 1,\n", + " max_steps=12,\n", + " num_workers=4,\n", + " strategy=\"constrained\",\n", + " optimization_order=\"sequential\",\n", + " debug=False,\n", + "):\n", + " print(\"Starting training process...\")\n", + "\n", + " # Define the model configuration for all components\n", + " gpt_4o_model = {\n", + " \"model\": \"gpt-4-turbo-preview\",\n", + " \"temperature\": 0,\n", + " \"max_tokens\": 1000,\n", + " \"top_p\": 1,\n", + " \"frequency_penalty\": 0,\n", + " \"presence_penalty\": 0,\n", + " }\n", + " print(f\"Component model configuration: {gpt_4o_model}\")\n", + "\n", + " try:\n", + " print(\"Initializing ADAL component...\")\n", + " adal_component = TrecClassifierAdal(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " text_optimizer_model_config=gpt_4o_model,\n", + " backward_engine_model_config=gpt_4o_model,\n", + " teacher_model_config=gpt_4o_model,\n", + " )\n", + " print(\"ADAL component initialized successfully\")\n", + "\n", + " print(\"Initializing trainer...\")\n", + " trainer = adal.Trainer(\n", + " train_batch_size=train_batch_size,\n", + " adaltask=adal_component,\n", + " strategy=strategy,\n", + " max_steps=max_steps,\n", + " num_workers=num_workers,\n", + " raw_shots=raw_shots,\n", + " bootstrap_shots=bootstrap_shots,\n", + " debug=debug,\n", + " weighted_sampling=True,\n", + " optimization_order=optimization_order,\n", + " exclude_input_fields_from_bootstrap_demos=True,\n", + " )\n", + " print(\"Trainer initialized successfully\")\n", + "\n", + " print(\"Loading datasets...\")\n", + " 
train_dataset, val_dataset, test_dataset = load_datasets()\n", + " print(\n", + " f\"Datasets loaded - Train size: {len(train_dataset)}, Val size: {len(val_dataset)}, Test size: {len(test_dataset)}\"\n", + " )\n", + "\n", + " print(\"Starting model training...\")\n", + " trainer.fit(\n", + " train_dataset=train_dataset,\n", + " val_dataset=test_dataset,\n", + " debug=debug,\n", + " )\n", + " print(\"Training completed successfully\")\n", + "\n", + " except Exception as e:\n", + " print(f\"Error occurred: {str(e)}\")\n", + " raise" + ], + "metadata": { + "id": "PEj6xiZ5dVaj" + }, + "execution_count": 52, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.components.model_client.openai_client import OpenAIClient\n", + "\n", + "\n", + "gpt_4o_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-4o-mini\",\n", + " \"max_tokens\": 2000,\n", + " },\n", + "}\n", + "\n", + "\n", + "train(\n", + " model_client=OpenAIClient(),\n", + " model_kwargs=gpt_4o_model,\n", + ")" + ], + "metadata": { + "id": "GnlZBQOMEj6E", + "collapsed": true + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "# Issues and feedback\n", + "\n", + "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", + "\n", + "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." + ], + "metadata": { + "id": "AmkbyxmuruUu" + } + } + ] } diff --git a/notebooks/tutorials/adalflow_component.ipynb b/notebooks/tutorials/adalflow_component.ipynb index 2da8aa78..8523a629 100644 --- a/notebooks/tutorials/adalflow_component.ipynb +++ b/notebooks/tutorials/adalflow_component.ipynb @@ -1,985 +1,994 @@ { - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 🤗 Welcome to AdalFlow!\n", - "## The library to build & auto-optimize any LLM task pipelines\n", - "\n", - "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", - "\n", - "\n", - "# Quick Links\n", - "\n", - "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", - "\n", - "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", - "\n", - "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", - "\n", - "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", - "\n", - "# Author\n", - "\n", - "This notebook was created by community contributor [Ajith](https://github.com/ajithvcoder).\n", - "\n", - "# Outline\n", - "\n", - "This is a quick introduction of what AdalFlow is capable of. We will cover:\n", - "\n", - "* How to use `DataClass` with `DataClassParser`.\n", - "* How to do nested dataclass, we will test both one and two levels of nesting.\n", - "\n", - "**Next: Try our [auto-optimization](https://colab.research.google.com/drive/1n3mHUWekTEYHiBdYBTw43TKlPN41A9za?usp=sharing)**\n", - "\n", - "\n", - "# Installation\n", - "\n", - "1. Use `pip` to install the `adalflow` Python package. We will need `openai` and `groq`from the extra packages.\n", - "\n", - " ```bash\n", - " pip install adalflow[openai,groq]\n", - " ```\n", - "2. 
Setup `openai` and `groq` API key in the environment variables" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "id": "Ab_OmE6XTl4h" - }, - "outputs": [], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "!pip install -U adalflow[openai,groq,datasets]\n", - "\n", - "clear_output()" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "id": "PbAIsBeeTQUk" - }, - "outputs": [], - "source": [ - "import re\n", - "from adalflow.core import Component, Generator\n", - "from adalflow.components.model_client import OpenAIClient\n", - "from adalflow.components.model_client import GroqAPIClient\n", - "from adalflow.utils import setup_env # make sure you have a .env file with OPENAI_API_KEY and GROQ_API_KEY" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "kRymwpwHTQUm", - "outputId": "6a992f52-1661-4002-ef74-ed26938c6baa" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Please enter your OpenAI API key: ··········\n", - "API keys have been set.\n" - ] - } - ], - "source": [ - "from getpass import getpass\n", - "import os\n", - "\n", - "# Prompt user to enter their API keys securely\n", - "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "\n", - "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "\n", - "print(\"API keys have been set.\")" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "id": "czGDvnVUTQUm" - }, - "outputs": [], - "source": [ - "template_doc = r\"\"\" You are a doctor User: {{input_str}}\"\"\"" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "PPs3gHqeTQUn" - }, - "source": [ - "Let's turn on the library log to help with debugging." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "98QNsOcSTQUn", - "outputId": "d63cba1b-6087-4b04-bb2b-0a9d9d4500a5" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from adalflow.utils import get_logger\n", - "get_logger()" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "id": "b3ey1lozTQUo" - }, - "outputs": [], - "source": [ - "#Toy example\n", - "\n", - "class DocQA(Component):\n", - " def __init__(self):\n", - " super(DocQA, self).__init__()\n", - " self.doc = Generator(\n", - " template=template_doc,\n", - " model_client=OpenAIClient(),\n", - " model_kwargs={\"model\": \"gpt-3.5-turbo\"},\n", - " )\n", - "\n", - " def call(self, query: str) -> str:\n", - " return self.doc(prompt_kwargs={\"input_str\": query}).data\n" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "TZAHSrbUTQUo", - "outputId": "66e81fb3-17f9-4570-dbbd-681cad1afc65" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-11-11 17:40:52 - prompt_builder - INFO - [prompt_builder.py:65:__init__] - Prompt has variables: ['input_str']\n", - "2024-11-11 17:40:52 - generator - INFO - [generator.py:144:__init__] - Generator Generator initialized.\n" - ] - } - ], - "source": [ - "doc = DocQA()" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "f-y6l44PTQUp", - "outputId": "e24aabd5-d758-4700-fa0d-46b66a88c412" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'type': 'DocQA', 'data': {'_components': {'_ordered_dict': True, 'data': [('doc', {'type': 'Generator', 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo', 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'), 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []}, 'cache': , '_components': {'_ordered_dict': True, 'data': [('prompt', {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}), ('model_client', {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Generator', '_init_args': {'model_client': None, 'model_kwargs': {}, 'template': None, 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, 'backward_engine': None, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'output_processors': None, 'mock_output': False, 'mock_output_data': 'mock data', 
'data_map_func': .default_map_func at 0x7b8d471c97e0>, '_use_cache': False, '_kwargs': {'model_client': {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, '_teacher': None}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'DocQA', '_init_args': {}}}\n" - ] - }, - { - "data": { - "text/plain": [ - "{'_components': OrderedDict([('doc',\n", - " Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", - " (model_client): OpenAIClient()\n", - " ))]),\n", - " '_parameters': OrderedDict(),\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'DocQA',\n", - " '_init_args': {}}" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# states\n", - "states = doc.to_dict()\n", - "print(states)\n", - "doc.__dict__" - ] + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The library to build & auto-optimize any LLM task pipelines\n", + "\n", + "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "# Author\n", + "\n", + "This notebook was created by community contributor [Ajith](https://github.com/ajithvcoder).\n", + "\n", + "# Outline\n", + "\n", + "This is a quick introduction of what AdalFlow is capable of. We will cover:\n", + "\n", + "* How to use `DataClass` with `DataClassParser`.\n", + "* How to do nested dataclass, we will test both one and two levels of nesting.\n", + "\n", + "**Next: Try our [auto-optimization](https://colab.research.google.com/drive/1n3mHUWekTEYHiBdYBTw43TKlPN41A9za?usp=sharing)**\n", + "\n", + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. We will need `openai` and `groq`from the extra packages.\n", + "\n", + " ```bash\n", + " pip install adalflow[openai,groq]\n", + " ```\n", + "2. 
Setup `openai` and `groq` API key in the environment variables" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "id": "Ab_OmE6XTl4h" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,datasets]\n", + "\n", + "clear_output()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "id": "PbAIsBeeTQUk" + }, + "outputs": [], + "source": [ + "import re\n", + "from adalflow.core import Component, Generator\n", + "from adalflow.components.model_client import OpenAIClient\n", + "from adalflow.components.model_client import GroqAPIClient\n", + "from adalflow.utils import (\n", + " setup_env,\n", + ") # make sure you have a .env file with OPENAI_API_KEY and GROQ_API_KEY" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "kRymwpwHTQUm", + "outputId": "6a992f52-1661-4002-ef74-ed26938c6baa" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": { - "id": "z_sH59_bTQUp" - }, - "source": [] + "name": "stdout", + "output_type": "stream", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "API keys have been set.\n" + ] + } + ], + "source": [ + "from getpass import getpass\n", + "import os\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "id": "czGDvnVUTQUm" + }, + "outputs": [], + "source": [ + "template_doc = r\"\"\" You are a doctor User: {{input_str}}\"\"\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PPs3gHqeTQUn" + }, + "source": [ + "Let's turn on the library log to help with debugging." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "98QNsOcSTQUn", + "outputId": "d63cba1b-6087-4b04-bb2b-0a9d9d4500a5" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "P81kIS2qTQUp", - "outputId": "d8e0e398-d704-4a85-8692-66a8c570b910" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict Generator, {'type': 'Generator', 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo', 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'), 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []}, 'cache': , '_components': {'_ordered_dict': True, 'data': [('prompt', {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}), ('model_client', {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Generator', '_init_args': {'model_client': None, 'model_kwargs': {}, 'template': None, 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, 'backward_engine': None, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'output_processors': None, 'mock_output': False, 'mock_output_data': 'mock data', 'data_map_func': .default_map_func at 0x7b8d471c97e0>, '_use_cache': False, '_kwargs': {'model_client': {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, '_teacher': None}}\n", - "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict Prompt, {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}\n", - "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using 
from_dict OpenAIClient, {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}\n", - "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict OpenAIClient, {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}\n" - ] - }, - { - "data": { - "text/plain": [ - "{'_components': OrderedDict([('doc',\n", - " Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", - " (model_client): OpenAIClient()\n", - " ))]),\n", - " '_parameters': OrderedDict(),\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'DocQA',\n", - " '_init_args': {}}" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# restore the states\n", - "doc2 = DocQA.from_dict(states)\n", - "# print(doc2.call(\"What is the capital of France?\"))\n", - "doc2.__dict__\n", - "# doc2.to_dict()" + "data": { + "text/plain": [ + "" ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from adalflow.utils import get_logger\n", + "\n", + "get_logger()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "id": "b3ey1lozTQUo" + }, + "outputs": [], + "source": [ + "# Toy example\n", + "\n", + "\n", + "class DocQA(Component):\n", + " def __init__(self):\n", + " super(DocQA, self).__init__()\n", + " self.doc = Generator(\n", + " template=template_doc,\n", + " model_client=OpenAIClient(),\n", + " model_kwargs={\"model\": \"gpt-3.5-turbo\"},\n", + " )\n", + "\n", + " def call(self, query: str) -> str:\n", + " return self.doc(prompt_kwargs={\"input_str\": query}).data" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "TZAHSrbUTQUo", + "outputId": "66e81fb3-17f9-4570-dbbd-681cad1afc65" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "198xYpLGTQUp", - "outputId": "ffd33d12-6db0-45c2-dfb1-3d57460ad4c9" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "{'type': 'DocQA',\n", - " 'data': {'_components': {'_ordered_dict': True,\n", - " 'data': [('doc',\n", - " {'type': 'Generator',\n", - " 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo',\n", - " 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'),\n", - " 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []},\n", - " 'cache': ,\n", - " '_components': {'_ordered_dict': True,\n", - " 'data': [('prompt',\n", - " {'type': 'Prompt',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': 
[]},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'Prompt',\n", - " '_init_args': {'template': None, 'prompt_kwargs': {}},\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'prompt_variables': ['input_str'],\n", - " 'prompt_kwargs': {}}}),\n", - " ('model_client',\n", - " {'type': 'OpenAIClient',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'OpenAIClient',\n", - " '_init_args': {'api_key': None,\n", - " 'chat_completion_parser': None,\n", - " 'input_type': 'text'},\n", - " '_api_key': None,\n", - " 'chat_completion_parser': str>,\n", - " '_input_type': 'text'}})]},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'Generator',\n", - " '_init_args': {'model_client': None,\n", - " 'model_kwargs': {},\n", - " 'template': None,\n", - " 'prompt_kwargs': {},\n", - " 'output_processors': None,\n", - " 'name': None,\n", - " 'cache_path': None,\n", - " 'use_cache': False},\n", - " 'backward_engine': None,\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'prompt_kwargs': {},\n", - " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", - " 'output_processors': None,\n", - " 'mock_output': False,\n", - " 'mock_output_data': 'mock data',\n", - " 'data_map_func': .default_map_func(data: 'GeneratorOutputType') -> str>,\n", - " '_use_cache': False,\n", - " '_kwargs': {'model_client': {'type': 'OpenAIClient',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'OpenAIClient',\n", - " '_init_args': {'api_key': None,\n", - " 'chat_completion_parser': None,\n", - " 'input_type': 'text'},\n", - " '_api_key': None,\n", - " 'chat_completion_parser': str>,\n", - " '_input_type': 'text'}},\n", - " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'prompt_kwargs': {},\n", - " 'output_processors': None,\n", - " 'name': None,\n", - " 'cache_path': None,\n", - " 'use_cache': False},\n", - " '_teacher': None}})]},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'DocQA',\n", - " '_init_args': {}}}" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "doc2.to_dict() == doc.to_dict()\n", - "doc2.to_dict()" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-11-11 17:40:52 - prompt_builder - INFO - [prompt_builder.py:65:__init__] - Prompt has variables: ['input_str']\n", + "2024-11-11 17:40:52 - generator - INFO - [generator.py:144:__init__] - Generator Generator initialized.\n" + ] + } + ], + "source": [ + "doc = DocQA()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "f-y6l44PTQUp", + "outputId": "e24aabd5-d758-4700-fa0d-46b66a88c412" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 15, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "Ulb1OWxxTQUq", - "outputId": 
"99972fcd-ed52-43b4-e461-a76c19bd9522" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-11-11 17:41:29 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?'}]}\n", - "2024-11-11 17:41:30 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", - "2024-11-11 17:41:30 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='As a doctor, the best treatment for a headache depends on the underlying cause of the headache. In general, for tension headaches or migraines, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help alleviate symptoms. It is also important to rest in a quiet, dark room and stay hydrated. If headaches are frequent or severe, it is important to consult with a healthcare provider for further evaluation and treatment options.', error=None, usage=CompletionUsage(completion_tokens=92, prompt_tokens=27, total_tokens=119), raw_response='As a doctor, the best treatment for a headache depends on the underlying cause of the headache. In general, for tension headaches or migraines, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help alleviate symptoms. It is also important to rest in a quiet, dark room and stay hydrated. If headaches are frequent or severe, it is important to consult with a healthcare provider for further evaluation and treatment options.', metadata=None)\n", - "As a doctor, the best treatment for a headache depends on the underlying cause of the headache. In general, for tension headaches or migraines, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help alleviate symptoms. It is also important to rest in a quiet, dark room and stay hydrated. 
If headaches are frequent or severe, it is important to consult with a healthcare provider for further evaluation and treatment options.\n" - ] - } - ], - "source": [ - "print(doc(\"What is the best treatment for headache?\"))" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "{'type': 'DocQA', 'data': {'_components': {'_ordered_dict': True, 'data': [('doc', {'type': 'Generator', 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo', 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'), 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []}, 'cache': , '_components': {'_ordered_dict': True, 'data': [('prompt', {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}), ('model_client', {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Generator', '_init_args': {'model_client': None, 'model_kwargs': {}, 'template': None, 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, 'backward_engine': None, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'output_processors': None, 'mock_output': False, 'mock_output_data': 'mock data', 'data_map_func': .default_map_func at 0x7b8d471c97e0>, '_use_cache': False, '_kwargs': {'model_client': {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, '_teacher': None}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'DocQA', '_init_args': {}}}\n" + ] }, { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "POVal8CgTQUq", - "outputId": "2fadb1d6-b858-4964-9045-8ea7454178e3" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-11-11 17:41:35 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?'}]}\n", - "2024-11-11 17:41:36 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", - "2024-11-11 
17:41:36 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='As a doctor, the best treatment for a headache will depend on the underlying cause of the headache. In general, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help relieve mild to moderate headaches. It is also important to stay hydrated, get adequate rest, manage stress, and practice good posture. If the headache persists or is severe, it is important to see a healthcare provider for further evaluation and treatment.', error=None, usage=CompletionUsage(completion_tokens=92, prompt_tokens=27, total_tokens=119), raw_response='As a doctor, the best treatment for a headache will depend on the underlying cause of the headache. In general, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help relieve mild to moderate headaches. It is also important to stay hydrated, get adequate rest, manage stress, and practice good posture. If the headache persists or is severe, it is important to see a healthcare provider for further evaluation and treatment.', metadata=None)\n", - "As a doctor, the best treatment for a headache will depend on the underlying cause of the headache. In general, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help relieve mild to moderate headaches. It is also important to stay hydrated, get adequate rest, manage stress, and practice good posture. If the headache persists or is severe, it is important to see a healthcare provider for further evaluation and treatment.\n" - ] - } - ], - "source": [ - "print(doc2(\"What is the best treatment for headache?\"))" + "data": { + "text/plain": [ + "{'_components': OrderedDict([('doc',\n", + " Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", + " (model_client): OpenAIClient()\n", + " ))]),\n", + " '_parameters': OrderedDict(),\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'DocQA',\n", + " '_init_args': {}}" ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# states\n", + "states = doc.to_dict()\n", + "print(states)\n", + "doc.__dict__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "z_sH59_bTQUp" + }, + "source": [] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "P81kIS2qTQUp", + "outputId": "d8e0e398-d704-4a85-8692-66a8c570b910" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": { - "id": "R5gTO1-8TQUr" - }, - "source": [] + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict Generator, {'type': 'Generator', 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo', 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'), 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []}, 'cache': , '_components': {'_ordered_dict': True, 'data': [('prompt', {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You 
are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}), ('model_client', {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}})]}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Generator', '_init_args': {'model_client': None, 'model_kwargs': {}, 'template': None, 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, 'backward_engine': None, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'output_processors': None, 'mock_output': False, 'mock_output_data': 'mock data', 'data_map_func': .default_map_func at 0x7b8d471c97e0>, '_use_cache': False, '_kwargs': {'model_client': {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}, 'model_kwargs': {'model': 'gpt-3.5-turbo'}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_kwargs': {}, 'output_processors': None, 'name': None, 'cache_path': None, 'use_cache': False}, '_teacher': None}}\n", + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict Prompt, {'type': 'Prompt', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'Prompt', '_init_args': {'template': None, 'prompt_kwargs': {}}, 'template': ' You are a doctor User: {{input_str}}', 'prompt_variables': ['input_str'], 'prompt_kwargs': {}}}\n", + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict OpenAIClient, {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}\n", + "2024-11-11 17:40:58 - component - INFO - [component.py:350:_restore_value] - Restoring class using from_dict OpenAIClient, {'type': 'OpenAIClient', 'data': {'_components': {'_ordered_dict': True, 'data': []}, '_parameters': {'_ordered_dict': True, 'data': []}, 'training': False, 'teacher_mode': False, 'tracing': False, 'name': 'OpenAIClient', '_init_args': {'api_key': None, 'chat_completion_parser': None, 'input_type': 'text'}, '_api_key': None, 'chat_completion_parser': , '_input_type': 'text'}}\n" + ] }, { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "jhgSpKrMTQUr", - "outputId": "15615bf7-2b72-4ac7-d1fe-f436a7304734" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "('', 
DocQA(\n", - " (doc): Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", - " (model_client): OpenAIClient()\n", - " )\n", - "))\n", - "('doc', Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", - " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", - " (model_client): OpenAIClient()\n", - "))\n", - "('doc.prompt', Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str']))\n", - "('doc.model_client', OpenAIClient())\n" - ] - } - ], - "source": [ - "# list other subcomponents\n", - "\n", - "for subcomponent in doc.named_components():\n", - " print(subcomponent)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "XjIHAY6bTQUr" - }, - "source": [ - "Let's add a parameter" + "data": { + "text/plain": [ + "{'_components': OrderedDict([('doc',\n", + " Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", + " (model_client): OpenAIClient()\n", + " ))]),\n", + " '_parameters': OrderedDict(),\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'DocQA',\n", + " '_init_args': {}}" ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# restore the states\n", + "doc2 = DocQA.from_dict(states)\n", + "# print(doc2.call(\"What is the capital of France?\"))\n", + "doc2.__dict__\n", + "# doc2.to_dict()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "198xYpLGTQUp", + "outputId": "ffd33d12-6db0-45c2-dfb1-3d57460ad4c9" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 18, - "metadata": { - "id": "vxgjAUiFTQUr" - }, - "outputs": [], - "source": [ - "from adalflow.optim.parameter import Parameter\n", - "\n", - "doc.register_parameter(\"demo\", param=Parameter(data=\"demo\"))" + "data": { + "text/plain": [ + "{'type': 'DocQA',\n", + " 'data': {'_components': {'_ordered_dict': True,\n", + " 'data': [('doc',\n", + " {'type': 'Generator',\n", + " 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo',\n", + " 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'),\n", + " 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []},\n", + " 'cache': ,\n", + " '_components': {'_ordered_dict': True,\n", + " 'data': [('prompt',\n", + " {'type': 'Prompt',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'Prompt',\n", + " '_init_args': {'template': None, 'prompt_kwargs': {}},\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_variables': ['input_str'],\n", + " 'prompt_kwargs': {}}}),\n", + " ('model_client',\n", + " {'type': 'OpenAIClient',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'OpenAIClient',\n", + " '_init_args': {'api_key': None,\n", + " 'chat_completion_parser': None,\n", + 
" 'input_type': 'text'},\n", + " '_api_key': None,\n", + " 'chat_completion_parser': str>,\n", + " '_input_type': 'text'}})]},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'Generator',\n", + " '_init_args': {'model_client': None,\n", + " 'model_kwargs': {},\n", + " 'template': None,\n", + " 'prompt_kwargs': {},\n", + " 'output_processors': None,\n", + " 'name': None,\n", + " 'cache_path': None,\n", + " 'use_cache': False},\n", + " 'backward_engine': None,\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_kwargs': {},\n", + " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", + " 'output_processors': None,\n", + " 'mock_output': False,\n", + " 'mock_output_data': 'mock data',\n", + " 'data_map_func': .default_map_func(data: 'GeneratorOutputType') -> str>,\n", + " '_use_cache': False,\n", + " '_kwargs': {'model_client': {'type': 'OpenAIClient',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'OpenAIClient',\n", + " '_init_args': {'api_key': None,\n", + " 'chat_completion_parser': None,\n", + " 'input_type': 'text'},\n", + " '_api_key': None,\n", + " 'chat_completion_parser': str>,\n", + " '_input_type': 'text'}},\n", + " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_kwargs': {},\n", + " 'output_processors': None,\n", + " 'name': None,\n", + " 'cache_path': None,\n", + " 'use_cache': False},\n", + " '_teacher': None}})]},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'DocQA',\n", + " '_init_args': {}}}" ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "doc2.to_dict() == doc.to_dict()\n", + "doc2.to_dict()" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "Ulb1OWxxTQUq", + "outputId": "99972fcd-ed52-43b4-e461-a76c19bd9522" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 19, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "86C-h1e1TQUr", - "outputId": "57cab4d0-eddf-433d-e364-5d7f07072fbf" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "('demo', Parameter(name=param_313f196d-3c48-4eb3-8138-b7bd74298fbd, requires_opt=True, param_type=none (), role_desc=, data=demo, predecessors=set(), gradients=[], raw_response=None, input_args=None, traces={}))\n" - ] - } - ], - "source": [ - "# list all parameters\n", - "for param in doc.named_parameters():\n", - " print(param)" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-11-11 17:41:29 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?'}]}\n", + "2024-11-11 17:41:30 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "2024-11-11 17:41:30 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='As a doctor, the best treatment 
for a headache depends on the underlying cause of the headache. In general, for tension headaches or migraines, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help alleviate symptoms. It is also important to rest in a quiet, dark room and stay hydrated. If headaches are frequent or severe, it is important to consult with a healthcare provider for further evaluation and treatment options.', error=None, usage=CompletionUsage(completion_tokens=92, prompt_tokens=27, total_tokens=119), raw_response='As a doctor, the best treatment for a headache depends on the underlying cause of the headache. In general, for tension headaches or migraines, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help alleviate symptoms. It is also important to rest in a quiet, dark room and stay hydrated. If headaches are frequent or severe, it is important to consult with a healthcare provider for further evaluation and treatment options.', metadata=None)\n", + "As a doctor, the best treatment for a headache depends on the underlying cause of the headache. In general, for tension headaches or migraines, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help alleviate symptoms. It is also important to rest in a quiet, dark room and stay hydrated. If headaches are frequent or severe, it is important to consult with a healthcare provider for further evaluation and treatment options.\n" + ] + } + ], + "source": [ + "print(doc(\"What is the best treatment for headache?\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "POVal8CgTQUq", + "outputId": "2fadb1d6-b858-4964-9045-8ea7454178e3" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "_s2MPukiTQUr", - "outputId": "b51c7d09-fb52-42d9-b2d5-4f44f5d22dc9" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "{'type': 'DocQA',\n", - " 'data': {'_components': {'_ordered_dict': True,\n", - " 'data': [('doc',\n", - " {'type': 'Generator',\n", - " 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo',\n", - " 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'),\n", - " 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []},\n", - " 'cache': ,\n", - " '_components': {'_ordered_dict': True,\n", - " 'data': [('prompt',\n", - " {'type': 'Prompt',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'Prompt',\n", - " '_init_args': {'template': None, 'prompt_kwargs': {}},\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'prompt_variables': ['input_str'],\n", - " 'prompt_kwargs': {}}}),\n", - " ('model_client',\n", - " {'type': 'OpenAIClient',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'OpenAIClient',\n", - " '_init_args': {'api_key': None,\n", - " 'chat_completion_parser': None,\n", - " 'input_type': 'text'},\n", - " '_api_key': None,\n", - " 'chat_completion_parser': str>,\n", - " '_input_type': 'text'}})]},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 
'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'Generator',\n", - " '_init_args': {'model_client': None,\n", - " 'model_kwargs': {},\n", - " 'template': None,\n", - " 'prompt_kwargs': {},\n", - " 'output_processors': None,\n", - " 'name': None,\n", - " 'cache_path': None,\n", - " 'use_cache': False},\n", - " 'backward_engine': None,\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'prompt_kwargs': {},\n", - " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", - " 'output_processors': None,\n", - " 'mock_output': False,\n", - " 'mock_output_data': 'mock data',\n", - " 'data_map_func': .default_map_func(data: 'GeneratorOutputType') -> str>,\n", - " '_use_cache': False,\n", - " '_kwargs': {'model_client': {'type': 'OpenAIClient',\n", - " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", - " '_parameters': {'_ordered_dict': True, 'data': []},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'OpenAIClient',\n", - " '_init_args': {'api_key': None,\n", - " 'chat_completion_parser': None,\n", - " 'input_type': 'text'},\n", - " '_api_key': None,\n", - " 'chat_completion_parser': str>,\n", - " '_input_type': 'text'}},\n", - " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", - " 'template': ' You are a doctor User: {{input_str}}',\n", - " 'prompt_kwargs': {},\n", - " 'output_processors': None,\n", - " 'name': None,\n", - " 'cache_path': None,\n", - " 'use_cache': False},\n", - " '_teacher': None}})]},\n", - " '_parameters': {'_ordered_dict': True,\n", - " 'data': [('demo',\n", - " {'name': 'param_313f196d-3c48-4eb3-8138-b7bd74298fbd',\n", - " 'role_desc': '',\n", - " 'data': 'demo',\n", - " 'requires_opt': True,\n", - " 'param_type': 'none ()',\n", - " 'predecessors': [],\n", - " 'gradients': [],\n", - " 'previous_data': None,\n", - " 'gradients_context': [],\n", - " 'grad_fn': 'None',\n", - " 'gradient_prompt': 'None',\n", - " 'raw_response': None,\n", - " 'score': None,\n", - " 'traces': {},\n", - " 'input_args': None,\n", - " 'demos': []})]},\n", - " 'training': False,\n", - " 'teacher_mode': False,\n", - " 'tracing': False,\n", - " 'name': 'DocQA',\n", - " '_init_args': {}}}" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "doc.to_dict()" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-11-11 17:41:35 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?'}]}\n", + "2024-11-11 17:41:36 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "2024-11-11 17:41:36 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='As a doctor, the best treatment for a headache will depend on the underlying cause of the headache. In general, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help relieve mild to moderate headaches. It is also important to stay hydrated, get adequate rest, manage stress, and practice good posture. 
If the headache persists or is severe, it is important to see a healthcare provider for further evaluation and treatment.', error=None, usage=CompletionUsage(completion_tokens=92, prompt_tokens=27, total_tokens=119), raw_response='As a doctor, the best treatment for a headache will depend on the underlying cause of the headache. In general, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help relieve mild to moderate headaches. It is also important to stay hydrated, get adequate rest, manage stress, and practice good posture. If the headache persists or is severe, it is important to see a healthcare provider for further evaluation and treatment.', metadata=None)\n", + "As a doctor, the best treatment for a headache will depend on the underlying cause of the headache. In general, over-the-counter pain medications such as acetaminophen, ibuprofen, or aspirin can help relieve mild to moderate headaches. It is also important to stay hydrated, get adequate rest, manage stress, and practice good posture. If the headache persists or is severe, it is important to see a healthcare provider for further evaluation and treatment.\n" + ] + } + ], + "source": [ + "print(doc2(\"What is the best treatment for headache?\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "R5gTO1-8TQUr" + }, + "source": [] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "jhgSpKrMTQUr", + "outputId": "15615bf7-2b72-4ac7-d1fe-f436a7304734" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "id": "mcIO1DuVTQUr" - }, - "outputs": [], - "source": [ - "from adalflow.utils.file_io import save_json\n", - "\n", - "save_json(doc.to_dict(), \"doc.json\")" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "('', DocQA(\n", + " (doc): Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", + " (model_client): OpenAIClient()\n", + " )\n", + "))\n", + "('doc', Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", + " (model_client): OpenAIClient()\n", + "))\n", + "('doc.prompt', Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str']))\n", + "('doc.model_client', OpenAIClient())\n" + ] + } + ], + "source": [ + "# list other subcomponents\n", + "\n", + "for subcomponent in doc.named_components():\n", + " print(subcomponent)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "XjIHAY6bTQUr" + }, + "source": [ + "Let's add a parameter" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "id": "vxgjAUiFTQUr" + }, + "outputs": [], + "source": [ + "from adalflow.optim.parameter import Parameter\n", + "\n", + "doc.register_parameter(\"demo\", param=Parameter(data=\"demo\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "86C-h1e1TQUr", + "outputId": "57cab4d0-eddf-433d-e364-5d7f07072fbf" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 22, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "0vvO0nogTQUr", - "outputId": "59131d9e-a996-4c8b-f32c-9a6a623d3db6" - }, - 
"outputs": [ - { - "data": { - "text/plain": [ - "OrderedDict([('demo',\n", - " Parameter(name=param_313f196d-3c48-4eb3-8138-b7bd74298fbd, requires_opt=True, param_type=none (), role_desc=, data=demo, predecessors=set(), gradients=[], raw_response=None, input_args=None, traces={}))])" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "doc.state_dict()" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "('demo', Parameter(name=param_313f196d-3c48-4eb3-8138-b7bd74298fbd, requires_opt=True, param_type=none (), role_desc=, data=demo, predecessors=set(), gradients=[], raw_response=None, input_args=None, traces={}))\n" + ] + } + ], + "source": [ + "# list all parameters\n", + "for param in doc.named_parameters():\n", + " print(param)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "_s2MPukiTQUr", + "outputId": "b51c7d09-fb52-42d9-b2d5-4f44f5d22dc9" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 23, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 125 - }, - "id": "uroqi93tTQUs", - "outputId": "8a3e4ecc-1368-475b-dc4d-2ff38821b8ac" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-11-11 17:42:18 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for a cold?'}]}\n", - "2024-11-11 17:42:19 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", - "2024-11-11 17:42:19 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='As a doctor, I recommend getting plenty of rest, staying hydrated, and taking over-the-counter medications like ibuprofen or acetaminophen to help relieve symptoms such as fever and congestion. Additionally, you can try using saline nasal sprays or lozenges to help soothe a sore throat. If your symptoms persist or worsen, it is best to consult with a healthcare provider for further evaluation and treatment.', error=None, usage=CompletionUsage(completion_tokens=85, prompt_tokens=28, total_tokens=113), raw_response='As a doctor, I recommend getting plenty of rest, staying hydrated, and taking over-the-counter medications like ibuprofen or acetaminophen to help relieve symptoms such as fever and congestion. Additionally, you can try using saline nasal sprays or lozenges to help soothe a sore throat. If your symptoms persist or worsen, it is best to consult with a healthcare provider for further evaluation and treatment.', metadata=None)\n" - ] - }, - { - "data": { - "application/vnd.google.colaboratory.intrinsic+json": { - "type": "string" - }, - "text/plain": [ - "'As a doctor, I recommend getting plenty of rest, staying hydrated, and taking over-the-counter medications like ibuprofen or acetaminophen to help relieve symptoms such as fever and congestion. Additionally, you can try using saline nasal sprays or lozenges to help soothe a sore throat. 
If your symptoms persist or worsen, it is best to consult with a healthcare provider for further evaluation and treatment.'" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "doc.call(\"What is the best treatment for a cold?\")" + "data": { + "text/plain": [ + "{'type': 'DocQA',\n", + " 'data': {'_components': {'_ordered_dict': True,\n", + " 'data': [('doc',\n", + " {'type': 'Generator',\n", + " 'data': {'model_str': 'OpenAIClient_gpt-3_5-turbo',\n", + " 'cache_path': PosixPath('/root/.adalflow/cache_OpenAIClient_gpt-3_5-turbo.db'),\n", + " 'callbacks': {'on_success': [], 'on_failure': [], 'on_complete': []},\n", + " 'cache': ,\n", + " '_components': {'_ordered_dict': True,\n", + " 'data': [('prompt',\n", + " {'type': 'Prompt',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'Prompt',\n", + " '_init_args': {'template': None, 'prompt_kwargs': {}},\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_variables': ['input_str'],\n", + " 'prompt_kwargs': {}}}),\n", + " ('model_client',\n", + " {'type': 'OpenAIClient',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'OpenAIClient',\n", + " '_init_args': {'api_key': None,\n", + " 'chat_completion_parser': None,\n", + " 'input_type': 'text'},\n", + " '_api_key': None,\n", + " 'chat_completion_parser': str>,\n", + " '_input_type': 'text'}})]},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'Generator',\n", + " '_init_args': {'model_client': None,\n", + " 'model_kwargs': {},\n", + " 'template': None,\n", + " 'prompt_kwargs': {},\n", + " 'output_processors': None,\n", + " 'name': None,\n", + " 'cache_path': None,\n", + " 'use_cache': False},\n", + " 'backward_engine': None,\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_kwargs': {},\n", + " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", + " 'output_processors': None,\n", + " 'mock_output': False,\n", + " 'mock_output_data': 'mock data',\n", + " 'data_map_func': .default_map_func(data: 'GeneratorOutputType') -> str>,\n", + " '_use_cache': False,\n", + " '_kwargs': {'model_client': {'type': 'OpenAIClient',\n", + " 'data': {'_components': {'_ordered_dict': True, 'data': []},\n", + " '_parameters': {'_ordered_dict': True, 'data': []},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'OpenAIClient',\n", + " '_init_args': {'api_key': None,\n", + " 'chat_completion_parser': None,\n", + " 'input_type': 'text'},\n", + " '_api_key': None,\n", + " 'chat_completion_parser': str>,\n", + " '_input_type': 'text'}},\n", + " 'model_kwargs': {'model': 'gpt-3.5-turbo'},\n", + " 'template': ' You are a doctor User: {{input_str}}',\n", + " 'prompt_kwargs': {},\n", + " 'output_processors': None,\n", + " 'name': None,\n", + " 'cache_path': None,\n", + " 'use_cache': False},\n", + " '_teacher': None}})]},\n", + " '_parameters': {'_ordered_dict': True,\n", + " 'data': [('demo',\n", + " {'name': 'param_313f196d-3c48-4eb3-8138-b7bd74298fbd',\n", + " 'role_desc': '',\n", + " 'data': 'demo',\n", + 
" 'requires_opt': True,\n", + " 'param_type': 'none ()',\n", + " 'predecessors': [],\n", + " 'gradients': [],\n", + " 'previous_data': None,\n", + " 'gradients_context': [],\n", + " 'grad_fn': 'None',\n", + " 'gradient_prompt': 'None',\n", + " 'raw_response': None,\n", + " 'score': None,\n", + " 'traces': {},\n", + " 'input_args': None,\n", + " 'demos': []})]},\n", + " 'training': False,\n", + " 'teacher_mode': False,\n", + " 'tracing': False,\n", + " 'name': 'DocQA',\n", + " '_init_args': {}}}" ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "doc.to_dict()" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "id": "mcIO1DuVTQUr" + }, + "outputs": [], + "source": [ + "from adalflow.utils.file_io import save_json\n", + "\n", + "save_json(doc.to_dict(), \"doc.json\")" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "0vvO0nogTQUr", + "outputId": "59131d9e-a996-4c8b-f32c-9a6a623d3db6" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 24, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "mYSDr462TQUs", - "outputId": "82414c82-8feb-4667-90ed-91c594cc6a73" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2\n", - "\n" - ] - } - ], - "source": [ - "from adalflow.core.component import FunComponent\n", - "\n", - "def add_one(x):\n", - " return x + 1\n", - "\n", - "fun_component = FunComponent(add_one)\n", - "print(fun_component(1))\n", - "print(type(fun_component))\n", - "\n", - "# output:\n", - "# 2\n", - "# " + "data": { + "text/plain": [ + "OrderedDict([('demo',\n", + " Parameter(name=param_313f196d-3c48-4eb3-8138-b7bd74298fbd, requires_opt=True, param_type=none (), role_desc=, data=demo, predecessors=set(), gradients=[], raw_response=None, input_args=None, traces={}))])" ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "doc.state_dict()" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 125 }, + "id": "uroqi93tTQUs", + "outputId": "8a3e4ecc-1368-475b-dc4d-2ff38821b8ac" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 25, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "3MW1tpzRTQUs", - "outputId": "351b8922-1423-434a-f470-ff435a1962d2" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2\n", - "\n" - ] - } - ], - "source": [ - "from adalflow.core.component import fun_to_component\n", - "\n", - "fun_component = fun_to_component(add_one)\n", - "print(fun_component(1))\n", - "print(type(fun_component))\n", - "\n", - "# output:\n", - "# 2\n", - "# " - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-11-11 17:42:18 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for a cold?'}]}\n", + "2024-11-11 17:42:19 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "2024-11-11 17:42:19 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='As a doctor, I recommend getting plenty of rest, staying hydrated, and taking over-the-counter 
medications like ibuprofen or acetaminophen to help relieve symptoms such as fever and congestion. Additionally, you can try using saline nasal sprays or lozenges to help soothe a sore throat. If your symptoms persist or worsen, it is best to consult with a healthcare provider for further evaluation and treatment.', error=None, usage=CompletionUsage(completion_tokens=85, prompt_tokens=28, total_tokens=113), raw_response='As a doctor, I recommend getting plenty of rest, staying hydrated, and taking over-the-counter medications like ibuprofen or acetaminophen to help relieve symptoms such as fever and congestion. Additionally, you can try using saline nasal sprays or lozenges to help soothe a sore throat. If your symptoms persist or worsen, it is best to consult with a healthcare provider for further evaluation and treatment.', metadata=None)\n" + ] }, { - "cell_type": "code", - "execution_count": 26, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "dxAoGrnQTQUs", - "outputId": "38c462a3-5abf-41f4-9231-746c8d0ffcb3" + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2\n", - "\n" - ] - } - ], - "source": [ - "# use it as a decorator\n", - "@fun_to_component\n", - "def add_one(x):\n", - " return x + 1\n", - "\n", - "print(add_one(1))\n", - "print(type(add_one))\n", - "\n", - "# output:\n", - "# 2\n", - "# " + "text/plain": [ + "'As a doctor, I recommend getting plenty of rest, staying hydrated, and taking over-the-counter medications like ibuprofen or acetaminophen to help relieve symptoms such as fever and congestion. Additionally, you can try using saline nasal sprays or lozenges to help soothe a sore throat. 
If your symptoms persist or worsen, it is best to consult with a healthcare provider for further evaluation and treatment.'" ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "doc.call(\"What is the best treatment for a cold?\")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "mYSDr462TQUs", + "outputId": "82414c82-8feb-4667-90ed-91c594cc6a73" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 28, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "7BvJEP_mTQUs", - "outputId": "066281b8-a650-4c48-c786-312022198015" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-11-11 17:42:39 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?Please be concise and only list the top treatments.'}]}\n", - "2024-11-11 17:42:40 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", - "2024-11-11 17:42:40 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='The top treatments for headache are rest, hydration, over-the-counter pain relievers such as ibuprofen or acetaminophen, and relaxation techniques such as deep breathing or meditation.', error=None, usage=CompletionUsage(completion_tokens=37, prompt_tokens=37, total_tokens=74), raw_response='The top treatments for headache are rest, hydration, over-the-counter pain relievers such as ibuprofen or acetaminophen, and relaxation techniques such as deep breathing or meditation.', metadata=None)\n", - "The top treatments for headache are rest, hydration, over-the-counter pain relievers such as ibuprofen or acetaminophen, and relaxation techniques such as deep breathing or meditation.\n" - ] - } - ], - "source": [ - "from adalflow.core import Sequential\n", - "\n", - "@fun_to_component\n", - "def enhance_query(query:str) -> str:\n", - " return query + \"Please be concise and only list the top treatments.\"\n", - "\n", - "seq = Sequential(enhance_query, doc)\n", - "\n", - "query = \"What is the best treatment for headache?\"\n", - "print(seq(query))" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2\n", + "\n" + ] + } + ], + "source": [ + "from adalflow.core.component import FunComponent\n", + "\n", + "\n", + "def add_one(x):\n", + " return x + 1\n", + "\n", + "\n", + "fun_component = FunComponent(add_one)\n", + "print(fun_component(1))\n", + "print(type(fun_component))\n", + "\n", + "# output:\n", + "# 2\n", + "# " + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "3MW1tpzRTQUs", + "outputId": "351b8922-1423-434a-f470-ff435a1962d2" + }, + "outputs": [ { - "cell_type": "code", - "execution_count": 29, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "aoZ2w8RUTQUt", - "outputId": "115d0ccf-33d1-4464-a951-cf9f5476284b" - }, - "outputs": [ - { - "data": { - "text/plain": [ - "Sequential(\n", - " (0): EnhanceQueryComponent(fun_name=enhance_query)\n", - " (1): DocQA(\n", - " (doc): Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", - " (prompt): Prompt(template: You are a doctor User: 
{{input_str}}, prompt_variables: ['input_str'])\n", - " (model_client): OpenAIClient()\n", - " )\n", - " )\n", - ")" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "seq" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2\n", + "\n" + ] + } + ], + "source": [ + "from adalflow.core.component import fun_to_component\n", + "\n", + "fun_component = fun_to_component(add_one)\n", + "print(fun_component(1))\n", + "print(type(fun_component))\n", + "\n", + "# output:\n", + "# 2\n", + "# " + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "dxAoGrnQTQUs", + "outputId": "38c462a3-5abf-41f4-9231-746c8d0ffcb3" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": { - "id": "F-ffAlC6TQUt" - }, - "source": [ - "# TODO: LLM for single choices" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2\n", + "\n" + ] + } + ], + "source": [ + "# use it as a decorator\n", + "@fun_to_component\n", + "def add_one(x):\n", + " return x + 1\n", + "\n", + "\n", + "print(add_one(1))\n", + "print(type(add_one))\n", + "\n", + "# output:\n", + "# 2\n", + "# " + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "7BvJEP_mTQUs", + "outputId": "066281b8-a650-4c48-c786-312022198015" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Issues and feedback\n", - "\n", - "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", - "\n", - "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." 
- ] + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-11-11 17:42:39 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' You are a doctor User: What is the best treatment for headache?Please be concise and only list the top treatments.'}]}\n", + "2024-11-11 17:42:40 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "2024-11-11 17:42:40 - generator - INFO - [generator.py:798:call] - output: GeneratorOutput(id=None, data='The top treatments for headache are rest, hydration, over-the-counter pain relievers such as ibuprofen or acetaminophen, and relaxation techniques such as deep breathing or meditation.', error=None, usage=CompletionUsage(completion_tokens=37, prompt_tokens=37, total_tokens=74), raw_response='The top treatments for headache are rest, hydration, over-the-counter pain relievers such as ibuprofen or acetaminophen, and relaxation techniques such as deep breathing or meditation.', metadata=None)\n", + "The top treatments for headache are rest, hydration, over-the-counter pain relievers such as ibuprofen or acetaminophen, and relaxation techniques such as deep breathing or meditation.\n" + ] } - ], - "metadata": { + ], + "source": [ + "from adalflow.core import Sequential\n", + "\n", + "\n", + "@fun_to_component\n", + "def enhance_query(query: str) -> str:\n", + " return query + \"Please be concise and only list the top treatments.\"\n", + "\n", + "\n", + "seq = Sequential(enhance_query, doc)\n", + "\n", + "query = \"What is the best treatment for headache?\"\n", + "print(seq(query))" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { "colab": { - "provenance": [] - }, - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" + "base_uri": "https://localhost:8080/" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" + "id": "aoZ2w8RUTQUt", + "outputId": "115d0ccf-33d1-4464-a951-cf9f5476284b" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Sequential(\n", + " (0): EnhanceQueryComponent(fun_name=enhance_query)\n", + " (1): DocQA(\n", + " (doc): Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo'}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(template: You are a doctor User: {{input_str}}, prompt_variables: ['input_str'])\n", + " (model_client): OpenAIClient()\n", + " )\n", + " )\n", + ")" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" } + ], + "source": [ + "seq" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "F-ffAlC6TQUt" + }, + "source": [ + "# TODO: LLM for single choices" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Issues and feedback\n", + "\n", + "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", + "\n", + "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." 
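A side note on the `fun_to_component` cells above: because each converted function becomes a full component, several of them can be chained in one `Sequential` with no LLM step in between. A minimal sketch, assuming only the `adalflow.core` imports already used in this notebook; the two helper functions below are illustrative, not part of the tutorial:

```python
from adalflow.core import Sequential
from adalflow.core.component import fun_to_component


@fun_to_component
def strip_whitespace(text: str) -> str:
    # Normalize raw input before it reaches any downstream component.
    return text.strip()


@fun_to_component
def add_instruction(text: str) -> str:
    # Append a formatting hint, mirroring the enhance_query pattern above.
    return text + " Please be concise."


pipeline = Sequential(strip_whitespace, add_instruction)
print(pipeline("  What is the best treatment for a cold? "))
# -> "What is the best treatment for a cold? Please be concise."
```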
+ ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" }, - "nbformat": 4, - "nbformat_minor": 0 + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 } diff --git a/notebooks/tutorials/adalflow_dataclasses.ipynb b/notebooks/tutorials/adalflow_dataclasses.ipynb index 3c96ffe5..7ae08f63 100644 --- a/notebooks/tutorials/adalflow_dataclasses.ipynb +++ b/notebooks/tutorials/adalflow_dataclasses.ipynb @@ -145,8 +145,8 @@ "\n", "\n", "# Set environment variables\n", - "os.environ['GROQ_API_KEY'] = groq_api_key\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "os.environ[\"GROQ_API_KEY\"] = groq_api_key\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", "\n", "print(\"API keys have been set.\")" ] @@ -231,12 +231,11 @@ " explanation: str = field(\n", " metadata={\"desc\": \"A brief explanation of the concept in one sentence.\"}\n", " )\n", - " example: str = field(\n", - " metadata={\"desc\": \"An example of the concept in a sentence.\"}\n", - " )\n", + " example: str = field(metadata={\"desc\": \"An example of the concept in a sentence.\"})\n", " # Control output fields order\n", " __output_fields__ = [\"explanation\", \"example\"]\n", "\n", + "\n", "# Define the template using jinja2 syntax\n", "qa_template = r\"\"\"\n", "You are a helpful assistant.\n", @@ -278,7 +277,7 @@ "\n", " async def acall(self, query: str):\n", " \"\"\"Asynchronous call to generate response\"\"\"\n", - " return await self.generator.acall({\"input_str\": query})\n" + " return await self.generator.acall({\"input_str\": query})" ] }, { @@ -380,26 +379,20 @@ "# 1. 
Basic DataClass with different field types\n", "@dataclass\n", "class MovieReview(adal.DataClass):\n", - " title: str = field(\n", - " metadata={\"desc\": \"The title of the movie\"}\n", - " )\n", + " title: str = field(metadata={\"desc\": \"The title of the movie\"})\n", " rating: float = field(\n", - " metadata={\n", - " \"desc\": \"Rating from 1.0 to 10.0\",\n", - " \"min\": 1.0,\n", - " \"max\": 10.0\n", - " }\n", + " metadata={\"desc\": \"Rating from 1.0 to 10.0\", \"min\": 1.0, \"max\": 10.0}\n", " )\n", " pros: List[str] = field(\n", " default_factory=list,\n", - " metadata={\"desc\": \"List of positive points about the movie\"}\n", + " metadata={\"desc\": \"List of positive points about the movie\"},\n", " )\n", " cons: List[str] = field(\n", " default_factory=list,\n", - " metadata={\"desc\": \"List of negative points about the movie\"}\n", + " metadata={\"desc\": \"List of negative points about the movie\"},\n", " )\n", "\n", - " __output_fields__ = [\"title\", \"rating\", \"pros\", \"cons\"]\n" + " __output_fields__ = [\"title\", \"rating\", \"pros\", \"cons\"]" ] }, { @@ -410,7 +403,6 @@ }, "outputs": [], "source": [ - "\n", "@dataclass\n", "class Actor(adal.DataClass):\n", " name: str = field(metadata={\"desc\": \"Actor's full name\"})\n", @@ -429,20 +421,18 @@ "\n", "# Have both MovieReview and Actor nested in DetailedMovieReview\n", "\n", + "\n", "@dataclass\n", "class DetailedMovieReview(adal.DataClass):\n", " basic_review: MovieReview\n", " cast: List[Actor] = field(\n", - " default_factory=list,\n", - " metadata={\"desc\": \"List of main actors in the movie\"}\n", + " default_factory=list, metadata={\"desc\": \"List of main actors in the movie\"}\n", " )\n", " genre: List[str] = field(\n", - " default_factory=list,\n", - " metadata={\"desc\": \"List of genres for the movie\"}\n", + " default_factory=list, metadata={\"desc\": \"List of genres for the movie\"}\n", " )\n", " recommend: bool = field(\n", - " default_factory=str,\n", - " metadata={\"desc\": \"Whether you would recommend this movie\"}\n", + " default_factory=str, metadata={\"desc\": \"Whether you would recommend this movie\"}\n", " )\n", "\n", " __output_fields__ = [\"basic_review\", \"cast\", \"genre\", \"recommend\"]" @@ -472,18 +462,25 @@ "source": [ "# Create the MovieReviewer component with MovieAnalysis data class\n", "class MovieReviewer(adal.Component):\n", - " def __init__(self, model_client: adal.ModelClient, model_kwargs: Dict, data_class: adal.DataClass):\n", + " def __init__(\n", + " self,\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + " data_class: adal.DataClass,\n", + " ):\n", " super().__init__()\n", - " self.additional_structure_prompt = \"Dont use 'type' and 'properties' in output directly give as dict\"\n", - " parser = adal.DataClassParser(\n", - " data_class=data_class,\n", - " return_data_class=True\n", + " self.additional_structure_prompt = (\n", + " \"Dont use 'type' and 'properties' in output directly give as dict\"\n", " )\n", + " parser = adal.DataClassParser(data_class=data_class, return_data_class=True)\n", " self.generator = adal.Generator(\n", " model_client=model_client,\n", " model_kwargs=model_kwargs,\n", " template=movie_review_template,\n", - " prompt_kwargs={\"output_format_str\": parser.get_output_format_str() + self.additional_structure_prompt},\n", + " prompt_kwargs={\n", + " \"output_format_str\": parser.get_output_format_str()\n", + " + self.additional_structure_prompt\n", + " },\n", " output_processors=parser,\n", " )\n", "\n", @@ -512,7 
+509,7 @@ "reviewer = MovieReviewer(\n", " model_client=GroqAPIClient(),\n", " model_kwargs={\"model\": \"llama3-8b-8192\"},\n", - " data_class=DetailedMovieReview\n", + " data_class=DetailedMovieReview,\n", ")\n", "\n", "response = reviewer(\"The Matrix\")\n", @@ -541,7 +538,7 @@ "reviewer = MovieReviewer(\n", " model_client=adal.OpenAIClient(),\n", " model_kwargs={\"model\": \"gpt-4o\"},\n", - " data_class=DetailedMovieReview\n", + " data_class=DetailedMovieReview,\n", ")\n", "response = reviewer(\"The Matrix\")\n", "print(f\"DetailedMovieReview: {response.data}\")\n", @@ -566,16 +563,16 @@ "source": [ "# 3. second level nested dataclass\n", "\n", + "\n", "@dataclass\n", "class MovieAnalysis(adal.DataClass):\n", " review: DetailedMovieReview\n", " box_office: float = field(\n", - " default=None,\n", - " metadata={\"desc\": \"Box office earnings in millions of dollars\"}\n", + " default=None, metadata={\"desc\": \"Box office earnings in millions of dollars\"}\n", " )\n", " awards: Dict[str, int] = field(\n", " default=None,\n", - " metadata={\"desc\": \"Dictionary of award categories and number of wins\"}\n", + " metadata={\"desc\": \"Dictionary of award categories and number of wins\"},\n", " )\n", "\n", " __output_fields__ = [\"review\", \"box_office\", \"awards\"]" @@ -605,7 +602,7 @@ "analysis = MovieReviewer(\n", " model_client=adal.OpenAIClient(),\n", " model_kwargs={\"model\": \"gpt-3.5-turbo\"},\n", - " data_class=MovieAnalysis\n", + " data_class=MovieAnalysis,\n", ")\n", "\n", "response = analysis(\"The Matrix\")\n", @@ -637,7 +634,7 @@ "analysis = MovieReviewer(\n", " model_client=GroqAPIClient(),\n", " model_kwargs={\"model\": \"llama3-8b-8192\"},\n", - " data_class=MovieAnalysis\n", + " data_class=MovieAnalysis,\n", ")\n", "\n", "response = analysis(\"The Matrix\")\n", @@ -668,33 +665,27 @@ "# 1. Basic DataClass with different field types\n", "@dataclass\n", "class SongReview(adal.DataClass):\n", - " title: str = field(\n", - " metadata={\"desc\": \"The title of the song\"}\n", - " )\n", - " album: str = field(\n", - " metadata={\"desc\": \"The album of the song\"}\n", - " )\n", + " title: str = field(metadata={\"desc\": \"The title of the song\"})\n", + " album: str = field(metadata={\"desc\": \"The album of the song\"})\n", " ranking: int = field(\n", - " metadata={\n", - " \"desc\": \"Billboard peak ranking from 1 to 200\",\n", - " \"min\": 1,\n", - " \"max\": 200\n", - " }\n", + " metadata={\"desc\": \"Billboard peak ranking from 1 to 200\", \"min\": 1, \"max\": 200}\n", " )\n", " streaming: Dict[str, int] = field(\n", " default_factory=list,\n", - " metadata={\"desc\": \"Dict of lastest approximate streaming count in spotify and in youtube. Gives the count in millions\"}\n", + " metadata={\n", + " \"desc\": \"Dict of lastest approximate streaming count in spotify and in youtube. 
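To summarize the wiring in `MovieReviewer` above: `DataClassParser` turns the `DataClass` schema into the `{{output_format_str}}` text that the `Generator` injects into the template, and with `return_data_class=True` the parsed `response.data` is meant to come back as an instance of that class rather than a plain dict. One small caveat in `DetailedMovieReview`: `recommend: bool = field(default_factory=str, ...)` makes the default an empty string rather than a boolean. A minimal sketch, assuming the `import adalflow as adal` alias used earlier in this notebook; `QuickVerdict` is a made-up class for illustration:

```python
from dataclasses import dataclass, field

import adalflow as adal


@dataclass
class QuickVerdict(adal.DataClass):
    # A plain boolean default avoids the default_factory=str pattern, which
    # would otherwise make the "default" an empty string instead of a bool.
    recommend: bool = field(
        default=False, metadata={"desc": "Whether you would recommend this movie"}
    )
    reason: str = field(default="", metadata={"desc": "One-sentence justification"})

    __output_fields__ = ["recommend", "reason"]


parser = adal.DataClassParser(data_class=QuickVerdict, return_data_class=True)
# This is the text MovieReviewer injects as {{output_format_str}}; with
# return_data_class=True the parsed LLM output is returned as a QuickVerdict.
print(parser.get_output_format_str())
```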
Gives the count in millions\"\n", + " },\n", " )\n", " pros: List[str] = field(\n", " default_factory=list,\n", - " metadata={\"desc\": \"List of positive points about the song\"}\n", + " metadata={\"desc\": \"List of positive points about the song\"},\n", " )\n", " cons: List[str] = field(\n", " default_factory=list,\n", - " metadata={\"desc\": \"List of negative points about the song\"}\n", + " metadata={\"desc\": \"List of negative points about the song\"},\n", " )\n", "\n", - " __output_fields__ = [\"title\", \"rating\", \"streaming\", \"pros\", \"cons\"]\n" + " __output_fields__ = [\"title\", \"rating\", \"streaming\", \"pros\", \"cons\"]" ] }, { @@ -705,7 +696,6 @@ }, "outputs": [], "source": [ - "\n", "@dataclass\n", "class Artist(adal.DataClass):\n", " name: str = field(metadata={\"desc\": \"Artist's full name\"})\n", @@ -722,6 +712,7 @@ "source": [ "# 2. Nested DataClass example\n", "\n", + "\n", "@dataclass\n", "class DetailedSongReview(adal.DataClass):\n", " basic_review: SongReview = field(\n", @@ -729,15 +720,13 @@ " )\n", " cast: List[Artist] = field(\n", " default_factory=list,\n", - " metadata={\"desc\": \"List of main singer, lyrisist and musicians in the song\"}\n", + " metadata={\"desc\": \"List of main singer, lyrisist and musicians in the song\"},\n", " )\n", " genre: List[str] = field(\n", - " default_factory=list,\n", - " metadata={\"desc\": \"List of genres for the song\"}\n", + " default_factory=list, metadata={\"desc\": \"List of genres for the song\"}\n", " )\n", " recommend: bool = field(\n", - " default_factory=str,\n", - " metadata={\"desc\": \"Whether you would recommend this song\"}\n", + " default_factory=str, metadata={\"desc\": \"Whether you would recommend this song\"}\n", " )\n", "\n", " __output_fields__ = [\"basic_review\", \"cast\", \"genre\", \"recommend\"]" @@ -753,21 +742,19 @@ "source": [ "# 3. 
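Two small mismatches in the `SongReview` cell above are worth flagging: `streaming` is annotated as `Dict[str, int]` but declared with `default_factory=list`, and `__output_fields__` lists `"rating"` although the class defines `ranking` (and omits `album`). A hedged sketch of how those declarations might read instead; `SongReviewFields` is an illustrative name, and it assumes the same `import adalflow as adal` alias used in this notebook:

```python
from dataclasses import dataclass, field
from typing import Dict, List

import adalflow as adal


@dataclass
class SongReviewFields(adal.DataClass):
    title: str = field(metadata={"desc": "The title of the song"})
    album: str = field(metadata={"desc": "The album of the song"})
    ranking: int = field(
        metadata={"desc": "Billboard peak ranking from 1 to 200", "min": 1, "max": 200}
    )
    streaming: Dict[str, int] = field(
        default_factory=dict,  # dict, not list, to match the annotated type
        metadata={"desc": "Latest approximate streaming counts in millions"},
    )
    pros: List[str] = field(
        default_factory=list,
        metadata={"desc": "List of positive points about the song"},
    )
    cons: List[str] = field(
        default_factory=list,
        metadata={"desc": "List of negative points about the song"},
    )

    # Reference the fields that actually exist on the class.
    __output_fields__ = ["title", "album", "ranking", "streaming", "pros", "cons"]
```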
two levels of nesting dataclass\n", "\n", - "# all these fields as we use default, it is optional, so \n", + "# all these fields as we use default, it is optional, so\n", "# llm might not output that field if they dont have information\n", "\n", + "\n", "@dataclass\n", "class SongAnalysis(adal.DataClass):\n", " review: DetailedSongReview = field(\n", " default=DetailedSongReview, metadata={\"desc\": \"Song review details\"}\n", " )\n", - " duration: float = field(\n", - " default=None,\n", - " metadata={\"desc\": \"Duration of the song\"}\n", - " )\n", + " duration: float = field(default=None, metadata={\"desc\": \"Duration of the song\"})\n", " awards: Dict[str, int] = field(\n", " default=None,\n", - " metadata={\"desc\": \"Dictionary of award categories and number of wins\"}\n", + " metadata={\"desc\": \"Dictionary of award categories and number of wins\"},\n", " )\n", "\n", " __output_fields__ = [\"review\", \"duration\", \"awards\"]" @@ -788,7 +775,7 @@ "{{output_format_str}}\n", "\n", "\n", - " Review this song: {{song_title}} \"\"\"\n" + " Review this song: {{song_title}} \"\"\"" ] }, { @@ -803,17 +790,20 @@ "class SongReviewer(adal.Component):\n", " def __init__(self, model_client: adal.ModelClient, model_kwargs: Dict):\n", " super().__init__()\n", - " self.additional_structure_prompt = \"Dont use 'type' and 'properties' in output directly give as dict\"\n", + " self.additional_structure_prompt = (\n", + " \"Dont use 'type' and 'properties' in output directly give as dict\"\n", + " )\n", " parser = adal.DataClassParser(\n", - " data_class=SongAnalysis,\n", - " return_data_class=False,\n", - " format_type=\"json\"\n", + " data_class=SongAnalysis, return_data_class=False, format_type=\"json\"\n", " )\n", " self.generator = adal.Generator(\n", " model_client=model_client,\n", " model_kwargs=model_kwargs,\n", " template=song_review_template,\n", - " prompt_kwargs={\"output_format_str\": parser.get_output_format_str() + self.additional_structure_prompt },\n", + " prompt_kwargs={\n", + " \"output_format_str\": parser.get_output_format_str()\n", + " + self.additional_structure_prompt\n", + " },\n", " output_processors=parser,\n", " )\n", "\n", @@ -836,8 +826,8 @@ ], "source": [ "analysis = SongReviewer(\n", - " model_client=GroqAPIClient(),\n", - " model_kwargs={\"model\": \"llama3-8b-8192\"},\n", + " model_client=GroqAPIClient(),\n", + " model_kwargs={\"model\": \"llama3-8b-8192\"},\n", ")\n", "\n", "response = analysis(\"Shape of you\")\n", @@ -886,27 +876,27 @@ "print(f\"Album: {analysis['review']['basic_review']['album']}\")\n", "print(f\"Ranking: {analysis['review']['basic_review']['ranking']}\")\n", "\n", - "for platform, views in analysis['review']['basic_review']['streaming'].items():\n", + "for platform, views in analysis[\"review\"][\"basic_review\"][\"streaming\"].items():\n", " print(f\"- {platform} - {views} million views\")\n", "print(\"\\nPros:\")\n", - "for pro in analysis['review'][\"basic_review\"][\"pros\"]:\n", + "for pro in analysis[\"review\"][\"basic_review\"][\"pros\"]:\n", " print(f\"- {pro}\")\n", "\n", "print(\"\\nArtist's:\")\n", - "for actor in analysis['review'][\"cast\"]:\n", - " print(f\"- {actor['name']} as {actor['role']}\")\n", + "for actor in analysis[\"review\"][\"cast\"]:\n", + " print(f\"- {actor['name']} as {actor['role']}\")\n", "\n", - "if analysis['review']['genre']:\n", + "if analysis[\"review\"][\"genre\"]:\n", " print(\"\\nGenere: \")\n", - " for genre in analysis['review']['genre']:\n", + " for genre in analysis[\"review\"][\"genre\"]:\n", " 
print(f\" {genre} \")\n", "\n", - "if analysis['duration']:\n", + "if analysis[\"duration\"]:\n", " print(f\"\\nDuration: {analysis['duration']} minutes\")\n", "\n", - "if hasattr(analysis, 'awards') and analysis['awards']:\n", + "if hasattr(analysis, \"awards\") and analysis[\"awards\"]:\n", " print(\"\\nAwards:\")\n", - " for category, count in analysis['awards'].items():\n", + " for category, count in analysis[\"awards\"].items():\n", " print(f\"- {category}: {count}\")" ] }, diff --git a/notebooks/tutorials/adalflow_function_calls.ipynb b/notebooks/tutorials/adalflow_function_calls.ipynb index 622448c9..6fba3594 100644 --- a/notebooks/tutorials/adalflow_function_calls.ipynb +++ b/notebooks/tutorials/adalflow_function_calls.ipynb @@ -1,737 +1,736 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Function calls\n", + "\n", + "Tools are means LLM can use to interact with the world beyond of its internal knowledge. Technically speaking, retrievers are tools to help LLM to get more relevant context, and memory is a tool for LLM to carry out a conversation. Deciding when, which, and how to use a tool, and even to creating a tool is an agentic behavior: Function calls is a process of showing LLM a list of funciton definitions and prompt it to choose one or few of them. Many places use tools and function calls interchangably.\n", + "\n", + "In this notebook we will covert function calls, including:\n", + "\n", + "- Function call walkthrough\n", + "\n", + "- Overall design\n", + "\n", + "- Function call in action\n", + "\n", + "It follows the tutorial here: https://adalflow.sylph.ai/tutorials/tool_helper.html#" + ], + "metadata": { + "id": "lLGpv1fLLIjF" + } + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "sfKEfaYC3Go7" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "os.environ[\"GROQ_API_KEY\"] = groq_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { "colab": { - "provenance": [] - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" + "base_uri": "https://localhost:8080/" }, - "language_info": { - "name": "python" + "id": "-4c_AGBt3PlR", + "outputId": "21a26437-9f95-4478-84e9-ba4369956b6f" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "Please enter your GROQ API key: ··········\n", + "API keys have been set.\n" + ] } + ] }, - "cells": [ - { - "cell_type": "markdown", - "source": [ - "# Function calls\n", - "\n", - "Tools are means LLM can use to interact with the world beyond of its internal knowledge. 
Technically speaking, retrievers are tools to help LLM to get more relevant context, and memory is a tool for LLM to carry out a conversation. Deciding when, which, and how to use a tool, and even to creating a tool is an agentic behavior: Function calls is a process of showing LLM a list of funciton definitions and prompt it to choose one or few of them. Many places use tools and function calls interchangably.\n", - "\n", - "In this notebook we will covert function calls, including:\n", - "\n", - "- Function call walkthrough\n", - "\n", - "- Overall design\n", - "\n", - "- Function call in action\n", - "\n", - "It follows the tutorial here: https://adalflow.sylph.ai/tutorials/tool_helper.html#" - ], - "metadata": { - "id": "lLGpv1fLLIjF" - } - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "id": "sfKEfaYC3Go7" - }, - "outputs": [], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "!pip install -U adalflow[openai,groq,faiss-cpu]\n", - "\n", - "clear_output()\n" - ] - }, - { - "cell_type": "code", - "source": [ - "import os\n", - "from getpass import getpass\n", - "\n", - "# Prompt user to enter their API keys securely\n", - "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", - "\n", - "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "os.environ['GROQ_API_KEY'] = groq_api_key\n", - "\n", - "print(\"API keys have been set.\")\n" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "-4c_AGBt3PlR", - "outputId": "21a26437-9f95-4478-84e9-ba4369956b6f" - }, - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Please enter your OpenAI API key: ··········\n", - "Please enter your GROQ API key: ··········\n", - "API keys have been set.\n" - ] - } - ] - }, - { - "cell_type": "code", - "source": [ - "from dataclasses import dataclass\n", - "from typing import List\n", - "import numpy as np\n", - "import time\n", - "import asyncio\n", - "\n", - "\n", - "\n", - "def multiply(a: int, b: int) -> int:\n", - " \"\"\"Multiply two numbers.\"\"\"\n", - " time.sleep(1)\n", - " return a * b\n", - "\n", - "\n", - "def add(a: int, b: int) -> int:\n", - " \"\"\"Add two numbers.\"\"\"\n", - " time.sleep(1)\n", - " return a + b\n", - "\n", - "\n", - "async def divide(a: float, b: float) -> float:\n", - " \"\"\"Divide two numbers.\"\"\"\n", - " await asyncio.sleep(1)\n", - " return float(a) / b\n", - "\n", - "\n", - "async def search(query: str) -> List[str]:\n", - " \"\"\"Search for query and return a list of results.\"\"\"\n", - " await asyncio.sleep(1)\n", - " return [\"result1\" + query, \"result2\" + query]\n", - "\n", - "\n", - "def numpy_sum(arr: np.ndarray) -> float:\n", - " \"\"\"Sum the elements of an array.\"\"\"\n", - " return np.sum(arr)\n", - "\n", - "\n", - "x = 2\n", - "\n", - "@dataclass\n", - "class Point:\n", - " x: int\n", - " y: int\n", - "\n", - "\n", - "def add_points(p1: Point, p2: Point) -> Point:\n", - " return Point(p1.x + p2.x, p1.y + p2.y)" - ], - "metadata": { - "id": "GMKuuP7xR9Nt" - }, - "execution_count": 4, - "outputs": [] - }, - { - "cell_type": "markdown", - "source": [ - "## Function Tool" - ], - "metadata": { - "id": "jCA7HMjtT16P" - } - }, - { - "cell_type": "code", - "source": [ - "from adalflow.core.func_tool import FunctionTool\n", - "\n", - "functions =[multiply, add, divide, search, numpy_sum, add_points]\n", - "tools = 
[\n", - " FunctionTool(fn=fn) for fn in functions\n", - "]\n", - "for tool in tools:\n", - " print(tool)" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "fgOEoLoDSBqh", - "outputId": "7e636e2c-9a5d-44f1-f0fe-fe8a6bea474d" - }, - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']}))\n", - "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']}))\n", - "FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']}))\n", - "FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']}))\n", - "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']}))\n", - "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))\n" - ] - } - ] - }, - { - "cell_type": "code", - "source": [ - "print(tools[-2].definition.to_dict())" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "CYJaHFhGSEzH", - "outputId": "9ab36c6c-7509-4e7f-ce85-11dae889c8c2" - }, - "execution_count": 6, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "{'func_name': 'numpy_sum', 'func_desc': 'numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', 'func_parameters': {'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']}}\n" - ] - } - ] - }, - { - "cell_type": "code", - "source": [ - "context_map = {tool.definition.func_name: tool for tool in tools}" - ], - "metadata": { - "id": "_O4bQgXrSKb6" - }, - "execution_count": 7, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "function_name = \"add\"\n", - "function_to_call = context_map[function_name]\n", - "function_args = {\"a\": 1, \"b\": 2}\n", - "function_response = function_to_call.call(**function_args)" - ], - "metadata": { - "id": "-RgWWMdISL1u" - }, - "execution_count": 8, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "from adalflow.core.tool_manager import ToolManager\n", - "\n", - "tool_manager = 
ToolManager(tools=functions)\n", - "print(tool_manager)" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "6CT7Tez1SOai", - "outputId": "e486d882-9179-4db3-f077-6adfc9fc6579" - }, - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "ToolManager(Tools: [FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))], Additional Context: {})\n" - ] - } - ] - }, - { - "cell_type": "markdown", - "source": [ - "## ToolManager" - ], - "metadata": { - "id": "jzFqNnN_T-cu" - } - }, - { - "cell_type": "code", - "source": [ - "from adalflow.core.tool_manager import ToolManager\n", - "\n", - "tool_manager = ToolManager(tools=functions)\n", - "print(tool_manager)" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "JX7MibWiUF3U", - "outputId": "20707186-5ec3-49a4-d553-c3160c3daa84" - }, - "execution_count": 10, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "ToolManager(Tools: [FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', 
func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))], Additional Context: {})\n" - ] - } - ] + { + "cell_type": "code", + "source": [ + "from dataclasses import dataclass\n", + "from typing import List\n", + "import numpy as np\n", + "import time\n", + "import asyncio\n", + "\n", + "\n", + "def multiply(a: int, b: int) -> int:\n", + " \"\"\"Multiply two numbers.\"\"\"\n", + " time.sleep(1)\n", + " return a * b\n", + "\n", + "\n", + "def add(a: int, b: int) -> int:\n", + " \"\"\"Add two numbers.\"\"\"\n", + " time.sleep(1)\n", + " return a + b\n", + "\n", + "\n", + "async def divide(a: float, b: float) -> float:\n", + " \"\"\"Divide two numbers.\"\"\"\n", + " await asyncio.sleep(1)\n", + " return float(a) / b\n", + "\n", + "\n", + "async def search(query: str) -> List[str]:\n", + " \"\"\"Search for query and return a list of results.\"\"\"\n", + " await asyncio.sleep(1)\n", + " return [\"result1\" + query, \"result2\" + query]\n", + "\n", + "\n", + "def numpy_sum(arr: np.ndarray) -> float:\n", + " \"\"\"Sum the elements of an array.\"\"\"\n", + " return np.sum(arr)\n", + "\n", + "\n", + "x = 2\n", + "\n", + "\n", + "@dataclass\n", + "class Point:\n", + " x: int\n", + " y: int\n", + "\n", + "\n", + "def add_points(p1: Point, p2: Point) -> Point:\n", + " return Point(p1.x + p2.x, p1.y + p2.y)" + ], + "metadata": { + "id": "GMKuuP7xR9Nt" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## Function Tool" + ], + "metadata": { + "id": "jCA7HMjtT16P" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.func_tool import FunctionTool\n", + "\n", + "functions = [multiply, add, divide, search, numpy_sum, add_points]\n", + "tools = [FunctionTool(fn=fn) for fn in functions]\n", + "for tool in tools:\n", + " print(tool)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "fgOEoLoDSBqh", + "outputId": "7e636e2c-9a5d-44f1-f0fe-fe8a6bea474d" + }, + "execution_count": 5, + "outputs": [ { - "cell_type": "markdown", - "source": [ - "## Function Call end-to-end" - ], - "metadata": { - "id": "9Bw2fs--UKX7" - } + "output_type": "stream", + "name": "stdout", + "text": [ + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': 
{'type': 'int'}}, 'required': ['a', 'b']}))\n", + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']}))\n", + "FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']}))\n", + "FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']}))\n", + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']}))\n", + "FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "print(tools[-2].definition.to_dict())" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "CYJaHFhGSEzH", + "outputId": "9ab36c6c-7509-4e7f-ce85-11dae889c8c2" + }, + "execution_count": 6, + "outputs": [ { - "cell_type": "code", - "source": [ - "template = r\"\"\"You have these tools available:\n", - "{% if tools %}\n", - "\n", - "{% for tool in tools %}\n", - "{{ loop.index }}.\n", - "{{tool}}\n", - "------------------------\n", - "{% endfor %}\n", - "\n", - "{% endif %}\n", - "\n", - "{{output_format_str}}\n", - "\n", - "\n", - "User: {{input_str}}\n", - "You:\n", - "\"\"\"" - ], - "metadata": { - "id": "TywPQMIVUOqh" - }, - "execution_count": 11, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "{'func_name': 'numpy_sum', 'func_desc': 'numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', 'func_parameters': {'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']}}\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "context_map = {tool.definition.func_name: tool for tool in tools}" + ], + "metadata": { + "id": "_O4bQgXrSKb6" + }, + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "function_name = \"add\"\n", + "function_to_call = context_map[function_name]\n", + "function_args = {\"a\": 1, \"b\": 2}\n", + "function_response = function_to_call.call(**function_args)" + ], + "metadata": { + "id": "-RgWWMdISL1u" + }, + "execution_count": 8, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.tool_manager import ToolManager\n", + "\n", + "tool_manager = ToolManager(tools=functions)\n", + "print(tool_manager)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": 
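One detail visible in the printed definitions above: `add_points` ends up with a `func_desc` ending in `None` because the function has no docstring, so the rendered tool list gives the LLM no description for it. A minimal sketch, reusing the `Point` class defined earlier in this notebook; `add_points_documented` is an illustrative name:

```python
from adalflow.core.func_tool import FunctionTool


def add_points_documented(p1: Point, p2: Point) -> Point:
    """Add two Point objects coordinate-wise."""
    return Point(p1.x + p2.x, p1.y + p2.y)


# With a docstring present, the generated func_desc carries a real description
# (as it does for multiply, add, divide, etc.), which is what the LLM sees in
# the rendered tool list.
print(FunctionTool(fn=add_points_documented).definition.func_desc)
```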
"6CT7Tez1SOai", + "outputId": "e486d882-9179-4db3-f077-6adfc9fc6579" + }, + "execution_count": 9, + "outputs": [ { - "cell_type": "code", - "source": [ - "from adalflow.core.prompt_builder import Prompt\n", - "\n", - "prompt = Prompt(template=template)\n", - "small_tool_manager = ToolManager(tools=tools[:2])\n", - "\n", - "renered_prompt = prompt(tools=small_tool_manager.yaml_definitions)\n", - "print(renered_prompt)" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "-vMajeXoUQ5A", - "outputId": "ca68601b-e9c8-41c3-a6fa-777f225e68e3" - }, - "execution_count": 12, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "You have these tools available:\n", - "\n", - "1.\n", - "func_name: multiply\n", - "func_desc: 'multiply(a: int, b: int) -> int\n", - "\n", - " Multiply two numbers.'\n", - "func_parameters:\n", - " type: object\n", - " properties:\n", - " a:\n", - " type: int\n", - " b:\n", - " type: int\n", - " required:\n", - " - a\n", - " - b\n", - "------------------------\n", - "2.\n", - "func_name: add\n", - "func_desc: 'add(a: int, b: int) -> int\n", - "\n", - " Add two numbers.'\n", - "func_parameters:\n", - " type: object\n", - " properties:\n", - " a:\n", - " type: int\n", - " b:\n", - " type: int\n", - " required:\n", - " - a\n", - " - b\n", - "------------------------\n", - "\n", - "\n", - "None\n", - "\n", - "\n", - "User: None\n", - "You:\n", - "\n" - ] - } - ] + "output_type": "stream", + "name": "stdout", + "text": [ + "ToolManager(Tools: [FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))], Additional Context: {})\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## ToolManager" + ], + "metadata": { + "id": 
"jzFqNnN_T-cu" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.tool_manager import ToolManager\n", + "\n", + "tool_manager = ToolManager(tools=functions)\n", + "print(tool_manager)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "JX7MibWiUF3U", + "outputId": "20707186-5ec3-49a4-d553-c3160c3daa84" + }, + "execution_count": 10, + "outputs": [ { - "cell_type": "code", - "source": [ - "from adalflow.core.types import Function\n", - "\n", - "output_data_class = Function\n", - "output_format_str = output_data_class.to_json_signature(exclude=[\"thought\", \"args\"])\n", - "\n", - "renered_prompt= prompt(output_format_str=output_format_str)\n", - "print(renered_prompt)" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "V9-90IFRUUNT", - "outputId": "ed2f829e-c656-43c6-a454-8a7c32d5dafe" - }, - "execution_count": 13, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "You have these tools available:\n", - "\n", - "{\n", - " \"name\": \"The name of the function (str) (optional)\",\n", - " \"kwargs\": \"The keyword arguments of the function (Optional[Dict[str, object]]) (optional)\"\n", - "}\n", - "\n", - "\n", - "User: None\n", - "You:\n", - "\n" - ] - } - ] + "output_type": "stream", + "name": "stdout", + "text": [ + "ToolManager(Tools: [FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='multiply', func_desc='multiply(a: int, b: int) -> int\\nMultiply two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add', func_desc='add(a: int, b: int) -> int\\nAdd two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'int'}, 'b': {'type': 'int'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='divide', func_desc='divide(a: float, b: float) -> float\\nDivide two numbers.', func_parameters={'type': 'object', 'properties': {'a': {'type': 'float'}, 'b': {'type': 'float'}}, 'required': ['a', 'b']})), FunctionTool(fn: , async: True, definition: FunctionDefinition(func_name='search', func_desc='search(query: str) -> List[str]\\nSearch for query and return a list of results.', func_parameters={'type': 'object', 'properties': {'query': {'type': 'str'}}, 'required': ['query']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='numpy_sum', func_desc='numpy_sum(arr: numpy.ndarray) -> float\\nSum the elements of an array.', func_parameters={'type': 'object', 'properties': {'arr': {'type': 'ndarray'}}, 'required': ['arr']})), FunctionTool(fn: , async: False, definition: FunctionDefinition(func_name='add_points', func_desc='add_points(p1: __main__.Point, p2: __main__.Point) -> __main__.Point\\nNone', func_parameters={'type': 'object', 'properties': {'p1': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}, 'p2': {'type': \"{'type': 'Point', 'properties': {'x': {'type': 'int'}, 'y': {'type': 'int'}}, 'required': ['x', 'y']}\"}}, 'required': ['p1', 'p2']}))], Additional Context: {})\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Function Call end-to-end" + ], + "metadata": { + "id": "9Bw2fs--UKX7" + } + }, + { + "cell_type": "code", + "source": [ + "template = r\"\"\"You have these tools available:\n", + "{% if tools %}\n", + 
"\n", + "{% for tool in tools %}\n", + "{{ loop.index }}.\n", + "{{tool}}\n", + "------------------------\n", + "{% endfor %}\n", + "\n", + "{% endif %}\n", + "\n", + "{{output_format_str}}\n", + "\n", + "\n", + "User: {{input_str}}\n", + "You:\n", + "\"\"\"" + ], + "metadata": { + "id": "TywPQMIVUOqh" + }, + "execution_count": 11, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.prompt_builder import Prompt\n", + "\n", + "prompt = Prompt(template=template)\n", + "small_tool_manager = ToolManager(tools=tools[:2])\n", + "\n", + "renered_prompt = prompt(tools=small_tool_manager.yaml_definitions)\n", + "print(renered_prompt)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "-vMajeXoUQ5A", + "outputId": "ca68601b-e9c8-41c3-a6fa-777f225e68e3" + }, + "execution_count": 12, + "outputs": [ { - "cell_type": "code", - "source": [ - "from adalflow.core.types import FunctionExpression\n", - "\n", - "output_data_class = FunctionExpression\n", - "output_format_str = output_data_class.to_json_signature(exclude=[\"thought\"])\n", - "print(prompt(output_format_str=output_format_str))" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "p3kPMhWaUYT1", - "outputId": "a3de7117-c3eb-404e-e2e7-8a5187b32f6b" - }, - "execution_count": 14, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "You have these tools available:\n", - "\n", - "{\n", - " \"action\": \"FuncName() Valid function call expression. Example: \\\"FuncName(a=1, b=2)\\\" Follow the data type specified in the function parameters.e.g. for Type object with x,y properties, use \\\"ObjectType(x=1, y=2) (str) (required)\"\n", - "}\n", - "\n", - "\n", - "User: None\n", - "You:\n", - "\n" - ] - } - ] + "output_type": "stream", + "name": "stdout", + "text": [ + "You have these tools available:\n", + "\n", + "1.\n", + "func_name: multiply\n", + "func_desc: 'multiply(a: int, b: int) -> int\n", + "\n", + " Multiply two numbers.'\n", + "func_parameters:\n", + " type: object\n", + " properties:\n", + " a:\n", + " type: int\n", + " b:\n", + " type: int\n", + " required:\n", + " - a\n", + " - b\n", + "------------------------\n", + "2.\n", + "func_name: add\n", + "func_desc: 'add(a: int, b: int) -> int\n", + "\n", + " Add two numbers.'\n", + "func_parameters:\n", + " type: object\n", + " properties:\n", + " a:\n", + " type: int\n", + " b:\n", + " type: int\n", + " required:\n", + " - a\n", + " - b\n", + "------------------------\n", + "\n", + "\n", + "None\n", + "\n", + "\n", + "User: None\n", + "You:\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.types import Function\n", + "\n", + "output_data_class = Function\n", + "output_format_str = output_data_class.to_json_signature(exclude=[\"thought\", \"args\"])\n", + "\n", + "renered_prompt = prompt(output_format_str=output_format_str)\n", + "print(renered_prompt)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "V9-90IFRUUNT", + "outputId": "ed2f829e-c656-43c6-a454-8a7c32d5dafe" + }, + "execution_count": 13, + "outputs": [ { - "cell_type": "code", - "source": [ - "from adalflow.components.output_parsers import JsonOutputParser\n", - "\n", - "func_parser = JsonOutputParser(data_class=Function, exclude_fields=[\"thought\", \"args\"])\n", - "instructions = func_parser.format_instructions()\n", - "print(instructions)" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": 
"MvGyoUmMUatR", - "outputId": "e819866b-f6e3-4c88-f9f1-22d725a28865" - }, - "execution_count": 17, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Your output should be formatted as a standard JSON instance with the following schema:\n", - "```\n", - "{\n", - " \"name\": \"The name of the function (str) (optional)\",\n", - " \"kwargs\": \"The keyword arguments of the function (Optional[Dict[str, object]]) (optional)\"\n", - "}\n", - "```\n", - "-Make sure to always enclose the JSON output in triple backticks (```). Please do not add anything other than valid JSON output!\n", - "-Use double quotes for the keys and string values.\n", - "-DO NOT mistaken the \"properties\" and \"type\" in the schema as the actual fields in the JSON output.\n", - "-Follow the JSON formatting conventions.\n" - ] - } - ] + "output_type": "stream", + "name": "stdout", + "text": [ + "You have these tools available:\n", + "\n", + "{\n", + " \"name\": \"The name of the function (str) (optional)\",\n", + " \"kwargs\": \"The keyword arguments of the function (Optional[Dict[str, object]]) (optional)\"\n", + "}\n", + "\n", + "\n", + "User: None\n", + "You:\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.types import FunctionExpression\n", + "\n", + "output_data_class = FunctionExpression\n", + "output_format_str = output_data_class.to_json_signature(exclude=[\"thought\"])\n", + "print(prompt(output_format_str=output_format_str))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "p3kPMhWaUYT1", + "outputId": "a3de7117-c3eb-404e-e2e7-8a5187b32f6b" + }, + "execution_count": 14, + "outputs": [ { - "cell_type": "markdown", - "source": [ - "## Function Output Format" - ], - "metadata": { - "id": "9W7DiGcpUme5" - } + "output_type": "stream", + "name": "stdout", + "text": [ + "You have these tools available:\n", + "\n", + "{\n", + " \"action\": \"FuncName() Valid function call expression. Example: \\\"FuncName(a=1, b=2)\\\" Follow the data type specified in the function parameters.e.g. 
for Type object with x,y properties, use \\\"ObjectType(x=1, y=2) (str) (required)\"\n", + "}\n", + "\n", + "\n", + "User: None\n", + "You:\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.components.output_parsers import JsonOutputParser\n", + "\n", + "func_parser = JsonOutputParser(data_class=Function, exclude_fields=[\"thought\", \"args\"])\n", + "instructions = func_parser.format_instructions()\n", + "print(instructions)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "MvGyoUmMUatR", + "outputId": "e819866b-f6e3-4c88-f9f1-22d725a28865" + }, + "execution_count": 17, + "outputs": [ { - "cell_type": "code", - "source": [ - "from adalflow.core.generator import Generator\n", - "from adalflow.core.types import ModelClientType\n", - "\n", - "model_kwargs = {\"model\": \"gpt-4o-mini\"}\n", - "prompt_kwargs = {\n", - " \"tools\": tool_manager.yaml_definitions,\n", - " \"output_format_str\": func_parser.format_instructions(),\n", - "}\n", - "generator = Generator(\n", - " model_client=ModelClientType.OPENAI(),\n", - " model_kwargs=model_kwargs,\n", - " template=template,\n", - " prompt_kwargs=prompt_kwargs,\n", - " output_processors=func_parser,\n", - ")" - ], - "metadata": { - "id": "z5tNhoruUp6o" - }, - "execution_count": 20, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "Your output should be formatted as a standard JSON instance with the following schema:\n", + "```\n", + "{\n", + " \"name\": \"The name of the function (str) (optional)\",\n", + " \"kwargs\": \"The keyword arguments of the function (Optional[Dict[str, object]]) (optional)\"\n", + "}\n", + "```\n", + "-Make sure to always enclose the JSON output in triple backticks (```). Please do not add anything other than valid JSON output!\n", + "-Use double quotes for the keys and string values.\n", + "-DO NOT mistaken the \"properties\" and \"type\" in the schema as the actual fields in the JSON output.\n", + "-Follow the JSON formatting conventions.\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Function Output Format" + ], + "metadata": { + "id": "9W7DiGcpUme5" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.core.generator import Generator\n", + "from adalflow.core.types import ModelClientType\n", + "\n", + "model_kwargs = {\"model\": \"gpt-4o-mini\"}\n", + "prompt_kwargs = {\n", + " \"tools\": tool_manager.yaml_definitions,\n", + " \"output_format_str\": func_parser.format_instructions(),\n", + "}\n", + "generator = Generator(\n", + " model_client=ModelClientType.OPENAI(),\n", + " model_kwargs=model_kwargs,\n", + " template=template,\n", + " prompt_kwargs=prompt_kwargs,\n", + " output_processors=func_parser,\n", + ")" + ], + "metadata": { + "id": "z5tNhoruUp6o" + }, + "execution_count": 20, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "queries = [\n", + " \"add 2 and 3\",\n", + " \"search for something\",\n", + " \"add points (1, 2) and (3, 4)\",\n", + " \"sum numpy array with arr = np.array([[1, 2], [3, 4]])\",\n", + " \"multiply 2 with local variable x\",\n", + " \"divide 2 by 3\",\n", + " \"Add 5 to variable y\",\n", + "]\n", + "\n", + "for idx, query in enumerate(queries):\n", + " prompt_kwargs = {\"input_str\": query}\n", + " print(f\"\\n{idx} Query: {query}\")\n", + " print(f\"{'-'*50}\")\n", + " try:\n", + " result = generator(prompt_kwargs=prompt_kwargs)\n", + " # print(f\"LLM raw output: {result.raw_response}\")\n", + " func = 
Function.from_dict(result.data)\n", + " print(f\"Function: {func}\")\n", + " func_output = tool_manager.execute_func(func)\n", + " print(f\"Function output: {func_output}\")\n", + " except Exception as e:\n", + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "9DCukn1SUs_x", + "outputId": "dcfd952c-0699-4d79-ee6d-a59373e3c75d" + }, + "execution_count": 21, + "outputs": [ { - "cell_type": "code", - "source": [ - "queries = [\n", - " \"add 2 and 3\",\n", - " \"search for something\",\n", - " \"add points (1, 2) and (3, 4)\",\n", - " \"sum numpy array with arr = np.array([[1, 2], [3, 4]])\",\n", - " \"multiply 2 with local variable x\",\n", - " \"divide 2 by 3\",\n", - " \"Add 5 to variable y\",\n", - "]\n", - "\n", - "for idx, query in enumerate(queries):\n", - " prompt_kwargs = {\"input_str\": query}\n", - " print(f\"\\n{idx} Query: {query}\")\n", - " print(f\"{'-'*50}\")\n", - " try:\n", - " result = generator(prompt_kwargs=prompt_kwargs)\n", - " # print(f\"LLM raw output: {result.raw_response}\")\n", - " func = Function.from_dict(result.data)\n", - " print(f\"Function: {func}\")\n", - " func_output = tool_manager.execute_func(func)\n", - " print(f\"Function output: {func_output}\")\n", - " except Exception as e:\n", - " print(\n", - " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", - " )" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "9DCukn1SUs_x", - "outputId": "dcfd952c-0699-4d79-ee6d-a59373e3c75d" - }, - "execution_count": 21, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "\n", - "0 Query: add 2 and 3\n", - "--------------------------------------------------\n", - "Function: Function(thought=None, name='add', args=[], kwargs={'a': 2, 'b': 3})\n", - "Function output: FunctionOutput(name='add', input=Function(thought=None, name='add', args=(), kwargs={'a': 2, 'b': 3}), parsed_input=None, output=5, error=None)\n", - "\n", - "1 Query: search for something\n", - "--------------------------------------------------\n", - "Function: Function(thought=None, name='search', args=[], kwargs={'query': 'something'})\n", - "Function output: FunctionOutput(name='search', input=Function(thought=None, name='search', args=(), kwargs={'query': 'something'}), parsed_input=None, output=['result1something', 'result2something'], error=None)\n", - "\n", - "2 Query: add points (1, 2) and (3, 4)\n", - "--------------------------------------------------\n" - ] - }, - { - "output_type": "stream", - "name": "stderr", - "text": [ - "ERROR:adalflow.core.func_tool:Error at calling : 'dict' object has no attribute 'x'\n" - ] - }, - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Function: Function(thought=None, name='add_points', args=[], kwargs={'p1': {'x': 1, 'y': 2}, 'p2': {'x': 3, 'y': 4}})\n", - "Function output: FunctionOutput(name='add_points', input=Function(thought=None, name='add_points', args=(), kwargs={'p1': {'x': 1, 'y': 2}, 'p2': {'x': 3, 'y': 4}}), parsed_input=None, output=None, error=\"'dict' object has no attribute 'x'\")\n", - "\n", - "3 Query: sum numpy array with arr = np.array([[1, 2], [3, 4]])\n", - "--------------------------------------------------\n", - "Function: Function(thought=None, name='numpy_sum', args=[], kwargs={'arr': [[1, 2], [3, 4]]})\n", - "Function output: FunctionOutput(name='numpy_sum', 
input=Function(thought=None, name='numpy_sum', args=(), kwargs={'arr': [[1, 2], [3, 4]]}), parsed_input=None, output=10, error=None)\n", - "\n", - "4 Query: multiply 2 with local variable x\n", - "--------------------------------------------------\n", - "Function: Function(thought=None, name='multiply', args=[], kwargs={'a': 2, 'b': 'x'})\n", - "Function output: FunctionOutput(name='multiply', input=Function(thought=None, name='multiply', args=(), kwargs={'a': 2, 'b': 'x'}), parsed_input=None, output='xx', error=None)\n", - "\n", - "5 Query: divide 2 by 3\n", - "--------------------------------------------------\n", - "Function: Function(thought=None, name='divide', args=[], kwargs={'a': 2.0, 'b': 3.0})\n", - "Function output: FunctionOutput(name='divide', input=Function(thought=None, name='divide', args=(), kwargs={'a': 2.0, 'b': 3.0}), parsed_input=None, output=0.6666666666666666, error=None)\n", - "\n", - "6 Query: Add 5 to variable y\n", - "--------------------------------------------------\n", - "Function: Function(thought=None, name='add', args=[], kwargs={'a': 5, 'b': 'y'})\n" - ] - }, - { - "output_type": "stream", - "name": "stderr", - "text": [ - "ERROR:adalflow.core.func_tool:Error at calling : unsupported operand type(s) for +: 'int' and 'str'\n" - ] - }, - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Function output: FunctionOutput(name='add', input=Function(thought=None, name='add', args=(), kwargs={'a': 5, 'b': 'y'}), parsed_input=None, output=None, error=\"unsupported operand type(s) for +: 'int' and 'str'\")\n" - ] - } - ] + "output_type": "stream", + "name": "stdout", + "text": [ + "\n", + "0 Query: add 2 and 3\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='add', args=[], kwargs={'a': 2, 'b': 3})\n", + "Function output: FunctionOutput(name='add', input=Function(thought=None, name='add', args=(), kwargs={'a': 2, 'b': 3}), parsed_input=None, output=5, error=None)\n", + "\n", + "1 Query: search for something\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='search', args=[], kwargs={'query': 'something'})\n", + "Function output: FunctionOutput(name='search', input=Function(thought=None, name='search', args=(), kwargs={'query': 'something'}), parsed_input=None, output=['result1something', 'result2something'], error=None)\n", + "\n", + "2 Query: add points (1, 2) and (3, 4)\n", + "--------------------------------------------------\n" + ] }, { - "cell_type": "markdown", - "source": [ - "## FunctionExpression Output Format" - ], - "metadata": { - "id": "O-sBTPATUwsD" - } + "output_type": "stream", + "name": "stderr", + "text": [ + "ERROR:adalflow.core.func_tool:Error at calling : 'dict' object has no attribute 'x'\n" + ] }, { - "cell_type": "code", - "source": [ - "tool_manager = ToolManager(\n", - " tools=functions,\n", - " additional_context={\"x\": x, \"y\": 0, \"np.array\": np.array, \"np\": np},\n", - ")\n", - "func_parser = JsonOutputParser(data_class=FunctionExpression)" - ], - "metadata": { - "id": "TVRZ44N1UyWg" - }, - "execution_count": 22, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "Function: Function(thought=None, name='add_points', args=[], kwargs={'p1': {'x': 1, 'y': 2}, 'p2': {'x': 3, 'y': 4}})\n", + "Function output: FunctionOutput(name='add_points', input=Function(thought=None, name='add_points', args=(), kwargs={'p1': {'x': 1, 'y': 2}, 'p2': {'x': 3, 'y': 4}}), parsed_input=None, output=None, 
error=\"'dict' object has no attribute 'x'\")\n", + "\n", + "3 Query: sum numpy array with arr = np.array([[1, 2], [3, 4]])\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='numpy_sum', args=[], kwargs={'arr': [[1, 2], [3, 4]]})\n", + "Function output: FunctionOutput(name='numpy_sum', input=Function(thought=None, name='numpy_sum', args=(), kwargs={'arr': [[1, 2], [3, 4]]}), parsed_input=None, output=10, error=None)\n", + "\n", + "4 Query: multiply 2 with local variable x\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='multiply', args=[], kwargs={'a': 2, 'b': 'x'})\n", + "Function output: FunctionOutput(name='multiply', input=Function(thought=None, name='multiply', args=(), kwargs={'a': 2, 'b': 'x'}), parsed_input=None, output='xx', error=None)\n", + "\n", + "5 Query: divide 2 by 3\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='divide', args=[], kwargs={'a': 2.0, 'b': 3.0})\n", + "Function output: FunctionOutput(name='divide', input=Function(thought=None, name='divide', args=(), kwargs={'a': 2.0, 'b': 3.0}), parsed_input=None, output=0.6666666666666666, error=None)\n", + "\n", + "6 Query: Add 5 to variable y\n", + "--------------------------------------------------\n", + "Function: Function(thought=None, name='add', args=[], kwargs={'a': 5, 'b': 'y'})\n" + ] }, { - "cell_type": "code", - "source": [ - "context = r\"\"\"\n", - "Your function expression also have access to these context:\n", - "{{context_str}}\n", - "\n", - "\"\"\"" - ], - "metadata": { - "id": "9h47p4XpU2BC" - }, - "execution_count": 23, - "outputs": [] + "output_type": "stream", + "name": "stderr", + "text": [ + "ERROR:adalflow.core.func_tool:Error at calling : unsupported operand type(s) for +: 'int' and 'str'\n" + ] }, { - "cell_type": "code", - "source": [ - "async def run_async_function_call(self, generator, tool_manager):\n", - " answers = []\n", - " start_time = time.time()\n", - " tasks = []\n", - " for idx, query in enumerate(queries):\n", - " tasks.append(self.process_query(idx, query, generator, tool_manager))\n", - "\n", - " results = await asyncio.gather(*tasks)\n", - " answers.extend(results)\n", - " end_time = time.time()\n", - " print(f\"Total time taken: {end_time - start_time :.2f} seconds\")\n", - " return answers\n", - "\n", - "async def process_query(self, idx, query, generator, tool_manager: ToolManager):\n", - " print(f\"\\n{idx} Query: {query}\")\n", - " print(f\"{'-'*50}\")\n", - " try:\n", - " result = generator(prompt_kwargs={\"input_str\": query})\n", - " func_expr = FunctionExpression.from_dict(result.data)\n", - " print(f\"Function_expr: {func_expr}\")\n", - " func = tool_manager.parse_func_expr(func_expr)\n", - " func_output = await tool_manager.execute_func_async(func)\n", - " print(f\"Function output: {func_output}\")\n", - " return func_output\n", - " except Exception as e:\n", - " print(\n", - " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", - " )\n", - " return None" - ], - "metadata": { - "id": "n9Qq7wcOU4X9" - }, - "execution_count": 24, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "Function output: FunctionOutput(name='add', input=Function(thought=None, name='add', args=(), kwargs={'a': 5, 'b': 'y'}), parsed_input=None, output=None, error=\"unsupported operand type(s) for +: 'int' and 'str'\")\n" + ] } - ] + ] + }, + { + "cell_type": "markdown", + 
"source": [ + "## FunctionExpression Output Format" + ], + "metadata": { + "id": "O-sBTPATUwsD" + } + }, + { + "cell_type": "code", + "source": [ + "tool_manager = ToolManager(\n", + " tools=functions,\n", + " additional_context={\"x\": x, \"y\": 0, \"np.array\": np.array, \"np\": np},\n", + ")\n", + "func_parser = JsonOutputParser(data_class=FunctionExpression)" + ], + "metadata": { + "id": "TVRZ44N1UyWg" + }, + "execution_count": 22, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "context = r\"\"\"\n", + "Your function expression also have access to these context:\n", + "{{context_str}}\n", + "\n", + "\"\"\"" + ], + "metadata": { + "id": "9h47p4XpU2BC" + }, + "execution_count": 23, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "async def run_async_function_call(self, generator, tool_manager):\n", + " answers = []\n", + " start_time = time.time()\n", + " tasks = []\n", + " for idx, query in enumerate(queries):\n", + " tasks.append(self.process_query(idx, query, generator, tool_manager))\n", + "\n", + " results = await asyncio.gather(*tasks)\n", + " answers.extend(results)\n", + " end_time = time.time()\n", + " print(f\"Total time taken: {end_time - start_time :.2f} seconds\")\n", + " return answers\n", + "\n", + "\n", + "async def process_query(self, idx, query, generator, tool_manager: ToolManager):\n", + " print(f\"\\n{idx} Query: {query}\")\n", + " print(f\"{'-'*50}\")\n", + " try:\n", + " result = generator(prompt_kwargs={\"input_str\": query})\n", + " func_expr = FunctionExpression.from_dict(result.data)\n", + " print(f\"Function_expr: {func_expr}\")\n", + " func = tool_manager.parse_func_expr(func_expr)\n", + " func_output = await tool_manager.execute_func_async(func)\n", + " print(f\"Function output: {func_output}\")\n", + " return func_output\n", + " except Exception as e:\n", + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )\n", + " return None" + ], + "metadata": { + "id": "n9Qq7wcOU4X9" + }, + "execution_count": 24, + "outputs": [] + } + ] } diff --git a/notebooks/tutorials/adalflow_logger.ipynb b/notebooks/tutorials/adalflow_logger.ipynb index 135d6450..ae5a7d83 100644 --- a/notebooks/tutorials/adalflow_logger.ipynb +++ b/notebooks/tutorials/adalflow_logger.ipynb @@ -1,242 +1,246 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Adalflow RAG Playbook example\n", + "\n", + "There are different patterns to build a RAG:\n", + "\n", + "- RAG with separate data process pipeline and a RAG task pipeline. 
This fits into a scenario where there is lots of data in production database, and we preprocess the data to embeddings and then we build a RAG task pipeline that retrieves context in multiple stages.\n", + "\n", + "- RAG with dynamic data access and caching the embedding dynamically in a local storage.\n", + "\n", + "Here we will have have a look at an example with a local DB using FAISS" + ], + "metadata": { + "id": "lLGpv1fLLIjF" + } + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "sfKEfaYC3Go7" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "os.environ[\"GROQ_API_KEY\"] = groq_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { "colab": { - "provenance": [] - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" + "base_uri": "https://localhost:8080/" }, - "language_info": { - "name": "python" + "id": "-4c_AGBt3PlR", + "outputId": "275b050a-ce64-4b40-a5f9-4ccc12d92add" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "Please enter your GROQ API key: ··········\n", + "API keys have been set.\n" + ] } + ] }, - "cells": [ - { - "cell_type": "markdown", - "source": [ - "# Adalflow RAG Playbook example\n", - "\n", - "There are different patterns to build a RAG:\n", - "\n", - "- RAG with separate data process pipeline and a RAG task pipeline. 
This fits into a scenario where there is lots of data in production database, and we preprocess the data to embeddings and then we build a RAG task pipeline that retrieves context in multiple stages.\n", - "\n", - "- RAG with dynamic data access and caching the embedding dynamically in a local storage.\n", - "\n", - "Here we will have have a look at an example with a local DB using FAISS" - ], - "metadata": { - "id": "lLGpv1fLLIjF" - } - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "id": "sfKEfaYC3Go7" - }, - "outputs": [], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "!pip install -U adalflow[openai,groq,faiss-cpu]\n", - "\n", - "clear_output()\n" - ] - }, - { - "cell_type": "code", - "source": [ - "import os\n", - "from getpass import getpass\n", - "\n", - "# Prompt user to enter their API keys securely\n", - "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", - "\n", - "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "os.environ['GROQ_API_KEY'] = groq_api_key\n", - "\n", - "print(\"API keys have been set.\")\n" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "-4c_AGBt3PlR", - "outputId": "275b050a-ce64-4b40-a5f9-4ccc12d92add" - }, - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Please enter your OpenAI API key: ··········\n", - "Please enter your GROQ API key: ··········\n", - "API keys have been set.\n" - ] - } - ] - }, - { - "cell_type": "markdown", - "source": [ - "## Design\n", - "\n", - "Some libraries may use hooks [2] and callbacks [3] [4], or advanced web-based debugging tools [5] [6] [7]. Hooks and callbacks are conceptually similar in that they both allow users to execute custom code at specific points during the execution of a program. Both provide mechanisms to inject additional behavior in response to certain events or conditions, without modifying the core logic. PyTorch defines, registers, and executes hooks mainly in its base classes like nn.Module and Tensor, without polluting the functional and user-facing APIs.\n", - "\n", - "At this point, our objectives are:\n", - "\n", - "1. Maximize debugging capabilities via the simple logging module to keep the source code clean.\n", - "\n", - "2. Additionally, as we can’t always control the outputs of generators, we will provide customized logger and tracers(drop-in decorators) for them, for which we will explain in Tracing. This will not break the first objective.\n", - "\n", - "In the future, when we have more complex requirements from users, we will consider adding hooks/callbacks but we will do it in a way to keep the functional and user-facing APIs clean." - ], - "metadata": { - "id": "4NztjiLR_EQE" - } - }, - { - "cell_type": "code", - "source": [ - "import logging\n", - "\n", - "log = logging.getLogger(__name__)" - ], - "metadata": { - "id": "d2H1vYoC_F-g" - }, - "execution_count": 3, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "from adalflow.utils.logger import get_logger\n", - "\n", - "\n", - "root_logger = get_logger()" - ], - "metadata": { - "id": "e2GxAapG_TJH" - }, - "execution_count": 4, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "from adalflow.utils.logger import printc\n", - "\n", - "printc(\"All logging examples are done. 
Feeling green!\", color=\"green\")" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "Yk4oiBFE_asG", - "outputId": "470e30dc-1b31-40c1-9e48-30754ae54b45" - }, - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "\u001b[32m2024-11-28 13:39:41 - [:3:] - All logging examples are done. Feeling green!\u001b[0m\n" - ] - } - ] - }, - { - "cell_type": "markdown", - "source": [ - "Set up all logs in one file\n", - "\n", - "Assume your source code is at src/task.py. You can log simply by:" - ], - "metadata": { - "id": "B8lmlT_9_nVP" - } - }, - { - "cell_type": "code", - "source": [ - "import logging\n", - "\n", - "log = logging.getLogger(__name__)\n", - "\n", - "class Task:\n", - " def __init__(self):\n", - " log.info(\"This is a user program child logger\")" - ], - "metadata": { - "id": "o_Ru1myM_c-J" - }, - "execution_count": 6, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "import logging\n", - "from adalflow.utils.logger import get_logger\n", - "\n", - "root_logger = get_logger(level=\"DEBUG\", save_dir=\"./logs\") # log to ./logs/lib.log\n", - "\n", - "# run code from the library components such as generator\n", - "# ....\n", - "\n", - "root_logger.info(\"This is the log in the main file\")" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "o7YPjEZk_ehg", - "outputId": "ad0f58e9-6f5c-4d00-e737-2fa1ad5ebd85" - }, - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "2024-11-28 13:39:46 - - INFO - [:9:] - This is the log in the main file\n" - ] - } - ] + { + "cell_type": "markdown", + "source": [ + "## Design\n", + "\n", + "Some libraries may use hooks [2] and callbacks [3] [4], or advanced web-based debugging tools [5] [6] [7]. Hooks and callbacks are conceptually similar in that they both allow users to execute custom code at specific points during the execution of a program. Both provide mechanisms to inject additional behavior in response to certain events or conditions, without modifying the core logic. PyTorch defines, registers, and executes hooks mainly in its base classes like nn.Module and Tensor, without polluting the functional and user-facing APIs.\n", + "\n", + "At this point, our objectives are:\n", + "\n", + "1. Maximize debugging capabilities via the simple logging module to keep the source code clean.\n", + "\n", + "2. Additionally, as we can’t always control the outputs of generators, we will provide customized logger and tracers(drop-in decorators) for them, for which we will explain in Tracing. This will not break the first objective.\n", + "\n", + "In the future, when we have more complex requirements from users, we will consider adding hooks/callbacks but we will do it in a way to keep the functional and user-facing APIs clean." + ], + "metadata": { + "id": "4NztjiLR_EQE" + } + }, + { + "cell_type": "code", + "source": [ + "import logging\n", + "\n", + "log = logging.getLogger(__name__)" + ], + "metadata": { + "id": "d2H1vYoC_F-g" + }, + "execution_count": 3, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.utils.logger import get_logger\n", + "\n", + "\n", + "root_logger = get_logger()" + ], + "metadata": { + "id": "e2GxAapG_TJH" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from adalflow.utils.logger import printc\n", + "\n", + "printc(\"All logging examples are done. 
Feeling green!\", color=\"green\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "Yk4oiBFE_asG", + "outputId": "470e30dc-1b31-40c1-9e48-30754ae54b45" + }, + "execution_count": 5, + "outputs": [ { - "cell_type": "markdown", - "source": [ - "Separate library and application logs" - ], - "metadata": { - "id": "Db1_Ob3X_gpe" - } + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[32m2024-11-28 13:39:41 - [:3:] - All logging examples are done. Feeling green!\u001b[0m\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "Set up all logs in one file\n", + "\n", + "Assume your source code is at src/task.py. You can log simply by:" + ], + "metadata": { + "id": "B8lmlT_9_nVP" + } + }, + { + "cell_type": "code", + "source": [ + "import logging\n", + "\n", + "log = logging.getLogger(__name__)\n", + "\n", + "\n", + "class Task:\n", + " def __init__(self):\n", + " log.info(\"This is a user program child logger\")" + ], + "metadata": { + "id": "o_Ru1myM_c-J" + }, + "execution_count": 6, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import logging\n", + "from adalflow.utils.logger import get_logger\n", + "\n", + "root_logger = get_logger(level=\"DEBUG\", save_dir=\"./logs\") # log to ./logs/lib.log\n", + "\n", + "# run code from the library components such as generator\n", + "# ....\n", + "\n", + "root_logger.info(\"This is the log in the main file\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "o7YPjEZk_ehg", + "outputId": "ad0f58e9-6f5c-4d00-e737-2fa1ad5ebd85" + }, + "execution_count": 7, + "outputs": [ { - "cell_type": "code", - "source": [ - "from adalflow.utils.logger import get_logger\n", - "\n", - "app_logger = get_logger(name=\"my_app\", level=\"DEBUG\", save_dir=\"./logs\") # log to ./logs/my_app.log\n", - "\n", - "class Task:\n", - " def __init__(self):\n", - " app_logger.info(\"This is a user program child logger\")" - ], - "metadata": { - "id": "rQWuFnUc_gNm" - }, - "execution_count": 8, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "2024-11-28 13:39:46 - - INFO - [:9:] - This is the log in the main file\n" + ] } - ] + ] + }, + { + "cell_type": "markdown", + "source": [ + "Separate library and application logs" + ], + "metadata": { + "id": "Db1_Ob3X_gpe" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.utils.logger import get_logger\n", + "\n", + "app_logger = get_logger(\n", + " name=\"my_app\", level=\"DEBUG\", save_dir=\"./logs\"\n", + ") # log to ./logs/my_app.log\n", + "\n", + "\n", + "class Task:\n", + " def __init__(self):\n", + " app_logger.info(\"This is a user program child logger\")" + ], + "metadata": { + "id": "rQWuFnUc_gNm" + }, + "execution_count": 8, + "outputs": [] + } + ] } diff --git a/notebooks/tutorials/adalflow_rag_optimization.ipynb b/notebooks/tutorials/adalflow_rag_optimization.ipynb index 7ae0b152..34d208bf 100644 --- a/notebooks/tutorials/adalflow_rag_optimization.ipynb +++ b/notebooks/tutorials/adalflow_rag_optimization.ipynb @@ -1,495 +1,498 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# 🤗 Welcome to AdalFlow!\n", + "## The PyTorch library to auto-optimize any LLM task pipelines\n", + "\n", + "Thanks 
for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", + "\n", + "\n", + "# Quick Links\n", + "\n", + "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", + "\n", + "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", + "\n", + "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", + "\n", + "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", + "\n", + "## 📖 Outline\n", + "\n", + "In this tutorial, we will cover the auto-optimization of a standard RAG:\n", + "\n", + "- Introducing HotPotQA dataset and HotPotQAData class.\n", + "\n", + "- Convert Dspy’s Retriever to AdalFlow’s Retriever to easy comparison.\n", + "\n", + "- Build the standard RAG with Retriever and Generator components.\n", + "\n", + "- Learn how to connect the output-input between components to enable auto-text-grad optimization." + ], + "metadata": { + "id": "xHF95Kr4CzGq" + } + }, + { + "cell_type": "markdown", + "source": [ + "\n", + "# Installation\n", + "\n", + "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq` from the extra packages.\n", + "\n", + " ```bash\n", + " pip install adalflow[openai,groq]\n", + " ```\n", + "2. Setup `openai` and `groq` API key in the environment variables\n", + "\n", + "You can choose to use different client. You can import the model client you prefer. We support `Anthropic`, `Cohere`, `Google`, `GROQ`, `OpenAI`, `Transformer` and more in development. We will use OpenAI here as an example.Please refer to our [full installation guide](https://adalflow.sylph.ai/get_started/installation.html)" + ], + "metadata": { + "id": "Kof5M6DRaKhh" + } + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "id": "tAp3eDjOCma1" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai] # also install the package for the model client you'll use\n", + "!pip install dspy\n", + "!pip install datasets\n", + "clear_output()" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Set Environment Variables\n", + "\n", + "Run the following code and pass your api key.\n", + "\n", + "Note: for normal `.py` projects, follow our [official installation guide](https://lightrag.sylph.ai/get_started/installation.html).\n", + "\n", + "*Go to [OpenAI](https://platform.openai.com/docs/introduction) to get API keys if you don't already have.*" + ], + "metadata": { + "id": "KapUyHMM07pJ" + } + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { "colab": { - "provenance": [] + "base_uri": "https://localhost:8080/" }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" + "id": "ONfzF9Puzdd_", + "outputId": "5fc0cd30-9ae7-443a-c06c-31e9edeafd69" + }, + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + 
"text": [ + "Please enter your OpenAI API key: ··········\n", + "API keys have been set.\n" + ] } + ] }, - "cells": [ - { - "cell_type": "markdown", - "source": [ - "# 🤗 Welcome to AdalFlow!\n", - "## The PyTorch library to auto-optimize any LLM task pipelines\n", - "\n", - "Thanks for trying us out, we're here to provide you with the best LLM application development experience you can dream of 😊 any questions or concerns you may have, [come talk to us on discord,](https://discord.gg/ezzszrRZvT) we're always here to help! ⭐ Star us on Github ⭐\n", - "\n", - "\n", - "# Quick Links\n", - "\n", - "Github repo: https://github.com/SylphAI-Inc/AdalFlow\n", - "\n", - "Full Tutorials: https://adalflow.sylph.ai/index.html#.\n", - "\n", - "Deep dive on each API: check out the [developer notes](https://adalflow.sylph.ai/tutorials/index.html).\n", - "\n", - "Common use cases along with the auto-optimization: check out [Use cases](https://adalflow.sylph.ai/use_cases/index.html).\n", - "\n", - "## 📖 Outline\n", - "\n", - "In this tutorial, we will cover the auto-optimization of a standard RAG:\n", - "\n", - "- Introducing HotPotQA dataset and HotPotQAData class.\n", - "\n", - "- Convert Dspy’s Retriever to AdalFlow’s Retriever to easy comparison.\n", - "\n", - "- Build the standard RAG with Retriever and Generator components.\n", - "\n", - "- Learn how to connect the output-input between components to enable auto-text-grad optimization." - ], - "metadata": { - "id": "xHF95Kr4CzGq" - } - }, - { - "cell_type": "markdown", - "source": [ - "\n", - "# Installation\n", - "\n", - "1. Use `pip` to install the `adalflow` Python package. We will need `openai`, `groq` from the extra packages.\n", - "\n", - " ```bash\n", - " pip install adalflow[openai,groq]\n", - " ```\n", - "2. Setup `openai` and `groq` API key in the environment variables\n", - "\n", - "You can choose to use different client. You can import the model client you prefer. We support `Anthropic`, `Cohere`, `Google`, `GROQ`, `OpenAI`, `Transformer` and more in development. 
We will use OpenAI here as an example.Please refer to our [full installation guide](https://adalflow.sylph.ai/get_started/installation.html)" - ], - "metadata": { - "id": "Kof5M6DRaKhh" - } - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "id": "tAp3eDjOCma1" - }, - "outputs": [], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "!pip install -U adalflow[openai] # also install the package for the model client you'll use\n", - "!pip install dspy\n", - "!pip install datasets\n", - "clear_output()" - ] - }, - { - "cell_type": "markdown", - "source": [ - "## Set Environment Variables\n", - "\n", - "Run the following code and pass your api key.\n", - "\n", - "Note: for normal `.py` projects, follow our [official installation guide](https://lightrag.sylph.ai/get_started/installation.html).\n", - "\n", - "*Go to [OpenAI](https://platform.openai.com/docs/introduction) to get API keys if you don't already have.*" - ], - "metadata": { - "id": "KapUyHMM07pJ" - } - }, - { - "cell_type": "code", - "source": [ - "import os\n", - "\n", - "from getpass import getpass\n", - "\n", - "# Prompt user to enter their API keys securely\n", - "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "\n", - "\n", - "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "\n", - "print(\"API keys have been set.\")" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "ONfzF9Puzdd_", - "outputId": "5fc0cd30-9ae7-443a-c06c-31e9edeafd69" - }, - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Please enter your OpenAI API key: ··········\n", - "API keys have been set.\n" - ] - } - ] - }, - { - "cell_type": "code", - "source": [ - "import dspy\n", - "import re\n", - "from typing import List, Union, Optional, Dict, Callable, Any, Tuple\n", - "from dataclasses import dataclass, field\n", - "import adalflow as adal\n", - "from adalflow.optim.parameter import Parameter, ParameterType\n", - "from adalflow.datasets.hotpot_qa import HotPotQA, HotPotQAData\n", - "from adalflow.datasets.types import Example\n", - "from adalflow.core.types import RetrieverOutput\n", - "from adalflow.core import Component, Generator\n", - "from adalflow.core.retriever import Retriever\n", - "from adalflow.core.component import fun_to_component\n", - "from adalflow.components.model_client.openai_client import OpenAIClient" - ], - "metadata": { - "id": "aE3I05BqOmd7" - }, - "execution_count": 20, - "outputs": [] + { + "cell_type": "code", + "source": [ + "import dspy\n", + "import re\n", + "from typing import List, Union, Optional, Dict, Callable, Any, Tuple\n", + "from dataclasses import dataclass, field\n", + "import adalflow as adal\n", + "from adalflow.optim.parameter import Parameter, ParameterType\n", + "from adalflow.datasets.hotpot_qa import HotPotQA, HotPotQAData\n", + "from adalflow.datasets.types import Example\n", + "from adalflow.core.types import RetrieverOutput\n", + "from adalflow.core import Component, Generator\n", + "from adalflow.core.retriever import Retriever\n", + "from adalflow.core.component import fun_to_component\n", + "from adalflow.components.model_client.openai_client import OpenAIClient" + ], + "metadata": { + "id": "aE3I05BqOmd7" + }, + "execution_count": 20, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "gpt_4o_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": 
\"gpt-4o-mini\",\n", + " \"max_tokens\": 2000,\n", + " },\n", + "}\n", + "\n", + "gpt_3_model = {\n", + " \"model_client\": OpenAIClient(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"max_tokens\": 2000,\n", + " },\n", + "}" + ], + "metadata": { + "id": "cqUUoua9fUxQ" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def load_datasets():\n", + "\n", + " trainset = HotPotQA(split=\"train\", size=20)\n", + " valset = HotPotQA(split=\"val\", size=50)\n", + " testset = HotPotQA(split=\"test\", size=50)\n", + " print(f\"trainset, valset: {len(trainset)}, {len(valset)}, example: {trainset[0]}\")\n", + " return trainset, valset, testset\n", + "\n", + "\n", + "@dataclass\n", + "class AnswerData(adal.DataClass):\n", + " reasoning: str = field(\n", + " metadata={\"desc\": \"The reasoning to produce the answer\"},\n", + " )\n", + " answer: str = field(\n", + " metadata={\"desc\": \"The answer you produced\"},\n", + " )\n", + "\n", + " __output_fields__ = [\"reasoning\", \"answer\"]\n", + "\n", + "\n", + "dataset = HotPotQA(split=\"train\", size=20)\n", + "print(dataset[0], type(dataset[0]))\n", + "\n", + "HotPotQAData(\n", + " id=\"5a8b57f25542995d1e6f1371\",\n", + " question=\"Were Scott Derrickson and Ed Wood of the same nationality?\",\n", + " answer=\"yes\",\n", + " gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\",\n", + ")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "0irHeHUkOmL8", + "outputId": "61f778a2-9ec1-4fda-daa2-bcc7f31baa78" + }, + "execution_count": 22, + "outputs": [ { - "cell_type": "code", - "source": [ - "\n", - "gpt_4o_model = {\n", - " \"model_client\": OpenAIClient(),\n", - " \"model_kwargs\": {\n", - " \"model\": \"gpt-4o-mini\",\n", - " \"max_tokens\": 2000,\n", - " },\n", - "}\n", - "\n", - "gpt_3_model = {\n", - " \"model_client\": OpenAIClient(),\n", - " \"model_kwargs\": {\n", - " \"model\": \"gpt-3.5-turbo\",\n", - " \"max_tokens\": 2000,\n", - " },\n", - "}" - ], - "metadata": { - "id": "cqUUoua9fUxQ" - }, - "execution_count": null, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\") \n" + ] }, { - "cell_type": "code", - "source": [ - "def load_datasets():\n", - "\n", - " trainset = HotPotQA(split=\"train\", size=20)\n", - " valset = HotPotQA(split=\"val\", size=50)\n", - " testset = HotPotQA(split=\"test\", size=50)\n", - " print(f\"trainset, valset: {len(trainset)}, {len(valset)}, example: {trainset[0]}\")\n", - " return trainset, valset, testset\n", - "\n", - "\n", - "@dataclass\n", - "class AnswerData(adal.DataClass):\n", - " reasoning: str = field(\n", - " metadata={\"desc\": \"The reasoning to produce the answer\"},\n", - " )\n", - " answer: str = field(\n", - " metadata={\"desc\": \"The answer you produced\"},\n", - " )\n", - "\n", - " __output_fields__ = [\"reasoning\", \"answer\"]\n", - "\n", - "\n", - "dataset = HotPotQA(split=\"train\", size=20)\n", - "print(dataset[0], type(dataset[0]))\n", - "\n", - "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\")" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "0irHeHUkOmL8", - "outputId": "61f778a2-9ec1-4fda-daa2-bcc7f31baa78" - }, - 
"execution_count": 22, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\") \n" - ] - }, - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\")" - ] - }, - "metadata": {}, - "execution_count": 22 - } + "output_type": "execute_result", + "data": { + "text/plain": [ + "HotPotQAData(id='5a8b57f25542995d1e6f1371', question='Were Scott Derrickson and Ed Wood of the same nationality?', answer='yes', gold_titles=\"{'Scott Derrickson', 'Ed Wood'}\")" ] - }, - { - "cell_type": "code", - "source": [ - "class DspyRetriever(adal.Retriever):\n", - " def __init__(self, top_k: int = 3):\n", - " super().__init__()\n", - " self.top_k = top_k\n", - " self.dspy_retriever = dspy.Retrieve(k=top_k)\n", - "\n", - " def call(self, input: str, top_k: Optional[int] = None) -> List[adal.RetrieverOutput]:\n", - "\n", - " k = top_k or self.top_k\n", - "\n", - " output = self.dspy_retriever(query_or_queries=input, k=k)\n", - " final_output: List[RetrieverOutput] = []\n", - " documents = output.passages\n", - "\n", - " final_output.append(\n", - " RetrieverOutput(\n", - " query=input,\n", - " documents=documents,\n", - " doc_indices=[],\n", - " )\n", - " )\n", - " return final_output\n", - "\n", - "def test_retriever():\n", - " question = \"How many storeys are in the castle that David Gregory inherited?\"\n", - " retriever = DspyRetriever(top_k=3)\n", - " retriever_out = retriever(input=question)\n", - " print(f\"retriever_out: {retriever_out}\")\n", - "\n", - "\n", - "def call(\n", - " self, question: str, id: Optional[str] = None\n", - " ) -> Union[adal.GeneratorOutput, adal.Parameter]:\n", - " prompt_kwargs = self._prepare_input(question)\n", - " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", - " return output\n", - "\n", - "\n", - "def call(self, question: str, id: str = None) -> adal.GeneratorOutput:\n", - " if self.training:\n", - " raise ValueError(\n", - " \"This component is not supposed to be called in training mode\"\n", - " )\n", - "\n", - " retriever_out = self.retriever.call(input=question)\n", - "\n", - " successor_map_fn = lambda x: ( # noqa E731\n", - " \"\\n\\n\".join(x[0].documents) if x and x[0] and x[0].documents else \"\"\n", - " )\n", - " retrieved_context = successor_map_fn(retriever_out)\n", - "\n", - " prompt_kwargs = {\n", - " \"context\": retrieved_context,\n", - " \"question\": question,\n", - " }\n", - "\n", - " output = self.llm.call(\n", - " prompt_kwargs=prompt_kwargs,\n", - " id=id,\n", - " )\n", - " return output\n", - "\n", - "\n", - "def forward(self, question: str, id: str = None) -> adal.Parameter:\n", - " if not self.training:\n", - " raise ValueError(\"This component is not supposed to be called in eval mode\")\n", - " retriever_out = self.retriever.forward(input=question)\n", - " successor_map_fn = lambda x: ( # noqa E731\n", - " \"\\n\\n\".join(x.data[0].documents)\n", - " if x.data and x.data[0] and x.data[0].documents\n", - " else \"\"\n", - " )\n", - " retriever_out.add_successor_map_fn(successor=self.llm, map_fn=successor_map_fn)\n", - " generator_out = self.llm.forward(\n", - " prompt_kwargs={\"question\": question, \"context\": retriever_out}, id=id\n", - " )\n", - " 
return generator_out\n", - "\n", - "\n", - "def bicall(\n", - " self, question: str, id: str = None\n", - ") -> Union[adal.GeneratorOutput, adal.Parameter]:\n", - " \"\"\"You can also combine both the forward and call in the same function.\n", - " Supports both training and eval mode by using __call__ for GradComponents\n", - " like Retriever and Generator\n", - " \"\"\"\n", - " retriever_out = self.retriever(input=question)\n", - " if isinstance(retriever_out, adal.Parameter):\n", - " successor_map_fn = lambda x: ( # noqa E731\n", - " \"\\n\\n\".join(x.data[0].documents)\n", - " if x.data and x.data[0] and x.data[0].documents\n", - " else \"\"\n", - " )\n", - " retriever_out.add_successor_map_fn(\n", - " successor=self.llm, map_fn=successor_map_fn\n", - " )\n", - " else:\n", - " successor_map_fn = lambda x: ( # noqa E731\n", - " \"\\n\\n\".join(x[0].documents) if x and x[0] and x[0].documents else \"\"\n", - " )\n", - " retrieved_context = successor_map_fn(retriever_out)\n", - " prompt_kwargs = {\n", - " \"context\": retrieved_context,\n", - " \"question\": question,\n", - " }\n", - " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", - " return output\n", - "\n", - "task_desc_str = r\"\"\"Answer questions with short factoid answers.\n", - "\n", - "You will receive context(may contain relevant facts) and a question.\n", - "Think step by step.\"\"\"\n", - "\n", - "\n", - "class VanillaRAG(adal.GradComponent):\n", - " def __init__(self, passages_per_hop=3, model_client=None, model_kwargs=None):\n", - " super().__init__()\n", - "\n", - " self.passages_per_hop = passages_per_hop\n", - "\n", - " self.retriever = DspyRetriever(top_k=passages_per_hop)\n", - " self.llm_parser = adal.DataClassParser(\n", - " data_class=AnswerData, return_data_class=True, format_type=\"json\"\n", - " )\n", - " self.llm = Generator(\n", - " model_client=model_client,\n", - " model_kwargs=model_kwargs,\n", - " prompt_kwargs={\n", - " \"task_desc_str\": adal.Parameter(\n", - " data=task_desc_str,\n", - " role_desc=\"Task description for the language model\",\n", - " param_type=adal.ParameterType.PROMPT,\n", - " ),\n", - " \"few_shot_demos\": adal.Parameter(\n", - " data=None,\n", - " requires_opt=True,\n", - " role_desc=\"To provide few shot demos to the language model\",\n", - " param_type=adal.ParameterType.DEMOS,\n", - " ),\n", - " \"output_format_str\": self.llm_parser.get_output_format_str(),\n", - " },\n", - " template=answer_template,\n", - " output_processors=self.llm_parser,\n", - " use_cache=True,\n", - " )\n", - "\n", - "\n", - "class VallinaRAGAdal(adal.AdalComponent):\n", - " def __init__(\n", - " self,\n", - " model_client: adal.ModelClient,\n", - " model_kwargs: Dict,\n", - " backward_engine_model_config: Dict | None = None,\n", - " teacher_model_config: Dict | None = None,\n", - " text_optimizer_model_config: Dict | None = None,\n", - " ):\n", - " task = VanillaRAG(\n", - " model_client=model_client,\n", - " model_kwargs=model_kwargs,\n", - " passages_per_hop=3,\n", - " )\n", - " eval_fn = AnswerMatchAcc(type=\"fuzzy_match\").compute_single_item\n", - " loss_fn = adal.EvalFnToTextLoss(\n", - " eval_fn=eval_fn, eval_fn_desc=\"fuzzy_match: 1 if str(y) in str(y_gt) else 0\"\n", - " )\n", - " super().__init__(\n", - " task=task,\n", - " eval_fn=eval_fn,\n", - " loss_fn=loss_fn,\n", - " backward_engine_model_config=backward_engine_model_config,\n", - " teacher_model_config=teacher_model_config,\n", - " text_optimizer_model_config=text_optimizer_model_config,\n", - " )\n", - "\n", - " # tell the 
trainer how to call the task\n", - " def prepare_task(self, sample: HotPotQAData) -> Tuple[Callable[..., Any], Dict]:\n", - " if self.task.training:\n", - " return self.task.forward, {\"question\": sample.question, \"id\": sample.id}\n", - " else:\n", - " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", - "\n", - "\n", - " # eval mode: get the generator output, directly engage with the eval_fn\n", - " def prepare_eval(self, sample: HotPotQAData, y_pred: adal.GeneratorOutput) -> float:\n", - " y_label = \"\"\n", - " if y_pred and y_pred.data and y_pred.data.answer:\n", - " y_label = y_pred.data.answer\n", - " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.answer}\n", - "\n", - "\n", - " # train mode: get the loss and get the data from the full_response\n", - " def prepare_loss(self, sample: HotPotQAData, pred: adal.Parameter):\n", - " # prepare gt parameter\n", - " y_gt = adal.Parameter(\n", - " name=\"y_gt\",\n", - " data=sample.answer,\n", - " eval_input=sample.answer,\n", - " requires_opt=False,\n", - " )\n", - "\n", - " # pred's full_response is the output of the task pipeline which is GeneratorOutput\n", - " pred.eval_input = (\n", - " pred.full_response.data.answer\n", - " if pred.full_response\n", - " and pred.full_response.data\n", - " and pred.full_response.data.answer\n", - " else \"\"\n", - " )\n", - " return self.loss_fn, {\"kwargs\": {\"y\": pred, \"y_gt\": y_gt}}\n", - "\n", - "def train_diagnose(\n", - " model_client: adal.ModelClient,\n", - " model_kwargs: Dict,\n", - ") -> Dict:\n", - "\n", - " trainset, valset, testset = load_datasets()\n", - "\n", - " adal_component = VallinaRAGAdal(\n", - " model_client,\n", - " model_kwargs,\n", - " backward_engine_model_config=gpt_4o_model,\n", - " teacher_model_config=gpt_3_model,\n", - " text_optimizer_model_config=gpt_3_model,\n", - " )\n", - " trainer = adal.Trainer(adaltask=adal_component)\n", - " trainer.diagnose(dataset=trainset, split=\"train\")\n", - " # trainer.diagnose(dataset=valset, split=\"val\")\n", - " # trainer.diagnose(dataset=testset, split=\"test\")\n" - ], - "metadata": { - "id": "ZZIEtZYHNVjo" - }, - "execution_count": 23, - "outputs": [] - }, - { - "cell_type": "markdown", - "source": [ - "# Issues and feedback\n", - "\n", - "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", - "\n", - "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." 
- ], - "metadata": { - "id": "AmkbyxmuruUu" - } + }, + "metadata": {}, + "execution_count": 22 } - ] + ] + }, + { + "cell_type": "code", + "source": [ + "class DspyRetriever(adal.Retriever):\n", + " def __init__(self, top_k: int = 3):\n", + " super().__init__()\n", + " self.top_k = top_k\n", + " self.dspy_retriever = dspy.Retrieve(k=top_k)\n", + "\n", + " def call(\n", + " self, input: str, top_k: Optional[int] = None\n", + " ) -> List[adal.RetrieverOutput]:\n", + "\n", + " k = top_k or self.top_k\n", + "\n", + " output = self.dspy_retriever(query_or_queries=input, k=k)\n", + " final_output: List[RetrieverOutput] = []\n", + " documents = output.passages\n", + "\n", + " final_output.append(\n", + " RetrieverOutput(\n", + " query=input,\n", + " documents=documents,\n", + " doc_indices=[],\n", + " )\n", + " )\n", + " return final_output\n", + "\n", + "\n", + "def test_retriever():\n", + " question = \"How many storeys are in the castle that David Gregory inherited?\"\n", + " retriever = DspyRetriever(top_k=3)\n", + " retriever_out = retriever(input=question)\n", + " print(f\"retriever_out: {retriever_out}\")\n", + "\n", + "\n", + "def call(\n", + " self, question: str, id: Optional[str] = None\n", + ") -> Union[adal.GeneratorOutput, adal.Parameter]:\n", + " prompt_kwargs = self._prepare_input(question)\n", + " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", + " return output\n", + "\n", + "\n", + "def call(self, question: str, id: str = None) -> adal.GeneratorOutput:\n", + " if self.training:\n", + " raise ValueError(\"This component is not supposed to be called in training mode\")\n", + "\n", + " retriever_out = self.retriever.call(input=question)\n", + "\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x[0].documents) if x and x[0] and x[0].documents else \"\"\n", + " )\n", + " retrieved_context = successor_map_fn(retriever_out)\n", + "\n", + " prompt_kwargs = {\n", + " \"context\": retrieved_context,\n", + " \"question\": question,\n", + " }\n", + "\n", + " output = self.llm.call(\n", + " prompt_kwargs=prompt_kwargs,\n", + " id=id,\n", + " )\n", + " return output\n", + "\n", + "\n", + "def forward(self, question: str, id: str = None) -> adal.Parameter:\n", + " if not self.training:\n", + " raise ValueError(\"This component is not supposed to be called in eval mode\")\n", + " retriever_out = self.retriever.forward(input=question)\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x.data[0].documents)\n", + " if x.data and x.data[0] and x.data[0].documents\n", + " else \"\"\n", + " )\n", + " retriever_out.add_successor_map_fn(successor=self.llm, map_fn=successor_map_fn)\n", + " generator_out = self.llm.forward(\n", + " prompt_kwargs={\"question\": question, \"context\": retriever_out}, id=id\n", + " )\n", + " return generator_out\n", + "\n", + "\n", + "def bicall(\n", + " self, question: str, id: str = None\n", + ") -> Union[adal.GeneratorOutput, adal.Parameter]:\n", + " \"\"\"You can also combine both the forward and call in the same function.\n", + " Supports both training and eval mode by using __call__ for GradComponents\n", + " like Retriever and Generator\n", + " \"\"\"\n", + " retriever_out = self.retriever(input=question)\n", + " if isinstance(retriever_out, adal.Parameter):\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x.data[0].documents)\n", + " if x.data and x.data[0] and x.data[0].documents\n", + " else \"\"\n", + " )\n", + " retriever_out.add_successor_map_fn(successor=self.llm, 
map_fn=successor_map_fn)\n", + " else:\n", + " successor_map_fn = lambda x: ( # noqa E731\n", + " \"\\n\\n\".join(x[0].documents) if x and x[0] and x[0].documents else \"\"\n", + " )\n", + " retrieved_context = successor_map_fn(retriever_out)\n", + " prompt_kwargs = {\n", + " \"context\": retrieved_context,\n", + " \"question\": question,\n", + " }\n", + " output = self.llm(prompt_kwargs=prompt_kwargs, id=id)\n", + " return output\n", + "\n", + "\n", + "task_desc_str = r\"\"\"Answer questions with short factoid answers.\n", + "\n", + "You will receive context(may contain relevant facts) and a question.\n", + "Think step by step.\"\"\"\n", + "\n", + "\n", + "class VanillaRAG(adal.GradComponent):\n", + " def __init__(self, passages_per_hop=3, model_client=None, model_kwargs=None):\n", + " super().__init__()\n", + "\n", + " self.passages_per_hop = passages_per_hop\n", + "\n", + " self.retriever = DspyRetriever(top_k=passages_per_hop)\n", + " self.llm_parser = adal.DataClassParser(\n", + " data_class=AnswerData, return_data_class=True, format_type=\"json\"\n", + " )\n", + " self.llm = Generator(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " prompt_kwargs={\n", + " \"task_desc_str\": adal.Parameter(\n", + " data=task_desc_str,\n", + " role_desc=\"Task description for the language model\",\n", + " param_type=adal.ParameterType.PROMPT,\n", + " ),\n", + " \"few_shot_demos\": adal.Parameter(\n", + " data=None,\n", + " requires_opt=True,\n", + " role_desc=\"To provide few shot demos to the language model\",\n", + " param_type=adal.ParameterType.DEMOS,\n", + " ),\n", + " \"output_format_str\": self.llm_parser.get_output_format_str(),\n", + " },\n", + " template=answer_template,\n", + " output_processors=self.llm_parser,\n", + " use_cache=True,\n", + " )\n", + "\n", + "\n", + "class VallinaRAGAdal(adal.AdalComponent):\n", + " def __init__(\n", + " self,\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + " backward_engine_model_config: Dict | None = None,\n", + " teacher_model_config: Dict | None = None,\n", + " text_optimizer_model_config: Dict | None = None,\n", + " ):\n", + " task = VanillaRAG(\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " passages_per_hop=3,\n", + " )\n", + " eval_fn = AnswerMatchAcc(type=\"fuzzy_match\").compute_single_item\n", + " loss_fn = adal.EvalFnToTextLoss(\n", + " eval_fn=eval_fn, eval_fn_desc=\"fuzzy_match: 1 if str(y) in str(y_gt) else 0\"\n", + " )\n", + " super().__init__(\n", + " task=task,\n", + " eval_fn=eval_fn,\n", + " loss_fn=loss_fn,\n", + " backward_engine_model_config=backward_engine_model_config,\n", + " teacher_model_config=teacher_model_config,\n", + " text_optimizer_model_config=text_optimizer_model_config,\n", + " )\n", + "\n", + " # tell the trainer how to call the task\n", + " def prepare_task(self, sample: HotPotQAData) -> Tuple[Callable[..., Any], Dict]:\n", + " if self.task.training:\n", + " return self.task.forward, {\"question\": sample.question, \"id\": sample.id}\n", + " else:\n", + " return self.task.call, {\"question\": sample.question, \"id\": sample.id}\n", + "\n", + " # eval mode: get the generator output, directly engage with the eval_fn\n", + " def prepare_eval(self, sample: HotPotQAData, y_pred: adal.GeneratorOutput) -> float:\n", + " y_label = \"\"\n", + " if y_pred and y_pred.data and y_pred.data.answer:\n", + " y_label = y_pred.data.answer\n", + " return self.eval_fn, {\"y\": y_label, \"y_gt\": sample.answer}\n", + "\n", + " # train mode: get the 
loss and get the data from the full_response\n", + " def prepare_loss(self, sample: HotPotQAData, pred: adal.Parameter):\n", + " # prepare gt parameter\n", + " y_gt = adal.Parameter(\n", + " name=\"y_gt\",\n", + " data=sample.answer,\n", + " eval_input=sample.answer,\n", + " requires_opt=False,\n", + " )\n", + "\n", + " # pred's full_response is the output of the task pipeline which is GeneratorOutput\n", + " pred.eval_input = (\n", + " pred.full_response.data.answer\n", + " if pred.full_response\n", + " and pred.full_response.data\n", + " and pred.full_response.data.answer\n", + " else \"\"\n", + " )\n", + " return self.loss_fn, {\"kwargs\": {\"y\": pred, \"y_gt\": y_gt}}\n", + "\n", + "\n", + "def train_diagnose(\n", + " model_client: adal.ModelClient,\n", + " model_kwargs: Dict,\n", + ") -> Dict:\n", + "\n", + " trainset, valset, testset = load_datasets()\n", + "\n", + " adal_component = VallinaRAGAdal(\n", + " model_client,\n", + " model_kwargs,\n", + " backward_engine_model_config=gpt_4o_model,\n", + " teacher_model_config=gpt_3_model,\n", + " text_optimizer_model_config=gpt_3_model,\n", + " )\n", + " trainer = adal.Trainer(adaltask=adal_component)\n", + " trainer.diagnose(dataset=trainset, split=\"train\")\n", + " # trainer.diagnose(dataset=valset, split=\"val\")\n", + " # trainer.diagnose(dataset=testset, split=\"test\")" + ], + "metadata": { + "id": "ZZIEtZYHNVjo" + }, + "execution_count": 23, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "# Issues and feedback\n", + "\n", + "If you encounter any issues, please report them here: [GitHub Issues](https://github.com/SylphAI-Inc/LightRAG/issues).\n", + "\n", + "For feedback, you can use either the [GitHub discussions](https://github.com/SylphAI-Inc/LightRAG/discussions) or [Discord](https://discord.gg/ezzszrRZvT)." + ], + "metadata": { + "id": "AmkbyxmuruUu" + } + } + ] } diff --git a/notebooks/tutorials/adalflow_rag_playbook.ipynb b/notebooks/tutorials/adalflow_rag_playbook.ipynb index 27c6bda0..308ade6e 100644 --- a/notebooks/tutorials/adalflow_rag_playbook.ipynb +++ b/notebooks/tutorials/adalflow_rag_playbook.ipynb @@ -1,522 +1,526 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Adalflow RAG Playbook example\n", + "\n", + "There are different patterns to build a RAG:\n", + "\n", + "- RAG with separate data process pipeline and a RAG task pipeline. 
This fits into a scenario where there is lots of data in production database, and we preprocess the data to embeddings and then we build a RAG task pipeline that retrieves context in multiple stages.\n", + "\n", + "- RAG with dynamic data access and caching the embedding dynamically in a local storage.\n", + "\n", + "Here we will have have a look at an example with a local DB using FAISS" + ], + "metadata": { + "id": "lLGpv1fLLIjF" + } + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "sfKEfaYC3Go7" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "os.environ[\"GROQ_API_KEY\"] = groq_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { "colab": { - "provenance": [] + "base_uri": "https://localhost:8080/" }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" + "id": "-4c_AGBt3PlR", + "outputId": "a36f157b-0b18-4f3d-d5a8-09aa94743922" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "Please enter your GROQ API key: ··········\n", + "API keys have been set.\n" + ] } + ] }, - "cells": [ - { - "cell_type": "markdown", - "source": [ - "# Adalflow RAG Playbook example\n", - "\n", - "There are different patterns to build a RAG:\n", - "\n", - "- RAG with separate data process pipeline and a RAG task pipeline. 
This fits into a scenario where there is lots of data in production database, and we preprocess the data to embeddings and then we build a RAG task pipeline that retrieves context in multiple stages.\n", - "\n", - "- RAG with dynamic data access and caching the embedding dynamically in a local storage.\n", - "\n", - "Here we will have have a look at an example with a local DB using FAISS" - ], - "metadata": { - "id": "lLGpv1fLLIjF" - } - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "id": "sfKEfaYC3Go7" - }, - "outputs": [], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "!pip install -U adalflow[openai,groq,faiss-cpu]\n", - "\n", - "clear_output()\n" - ] - }, - { - "cell_type": "code", - "source": [ - "import os\n", - "from getpass import getpass\n", - "\n", - "# Prompt user to enter their API keys securely\n", - "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", - "\n", - "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "os.environ['GROQ_API_KEY'] = groq_api_key\n", - "\n", - "print(\"API keys have been set.\")\n" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "-4c_AGBt3PlR", - "outputId": "a36f157b-0b18-4f3d-d5a8-09aa94743922" - }, - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Please enter your OpenAI API key: ··········\n", - "Please enter your GROQ API key: ··········\n", - "API keys have been set.\n" - ] - } - ] - }, - { - "cell_type": "code", - "source": [ - "from typing import Any, List, Optional\n", - "import os\n", - "from adalflow.core import Component, Generator, Embedder, Sequential\n", - "from adalflow.core.types import Document, ModelClientType\n", - "from adalflow.core.string_parser import JsonParser\n", - "from adalflow.core.db import LocalDB\n", - "from adalflow.utils import setup_env\n", - "from adalflow.components.retriever.faiss_retriever import FAISSRetriever\n", - "from adalflow.components.data_process import (\n", - " RetrieverOutputToContextStr,\n", - " ToEmbeddings,\n", - " TextSplitter,\n", - ")\n", - "from adalflow.utils.global_config import get_adalflow_default_root_path\n" - ], - "metadata": { - "id": "V9LsGDnm3RbV" - }, - "execution_count": 4, - "outputs": [] - }, - { - "cell_type": "code", - "source": [ - "configs = {\n", - " \"embedder\": {\n", - " \"batch_size\": 100,\n", - " \"model_kwargs\": {\n", - " \"model\": \"text-embedding-3-small\",\n", - " \"dimensions\": 256,\n", - " \"encoding_format\": \"float\",\n", - " },\n", - " },\n", - " \"retriever\": {\n", - " \"top_k\": 5,\n", - " },\n", - " \"generator\": {\n", - " \"model_client\": ModelClientType.OPENAI(),\n", - " \"model_kwargs\": {\n", - " \"model\": \"gpt-3.5-turbo\",\n", - " \"temperature\": 0.3,\n", - " \"stream\": False,\n", - " },\n", - " },\n", - " \"text_splitter\": {\n", - " \"split_by\": \"word\",\n", - " \"chunk_size\": 400,\n", - " \"chunk_overlap\": 200,\n", - " },\n", - "}\n" - ], - "metadata": { - "id": "kWGTZxrw3Tli" - }, - "execution_count": 5, - "outputs": [] + { + "cell_type": "code", + "source": [ + "from typing import Any, List, Optional\n", + "import os\n", + "from adalflow.core import Component, Generator, Embedder, Sequential\n", + "from adalflow.core.types import Document, ModelClientType\n", + "from adalflow.core.string_parser import JsonParser\n", + "from adalflow.core.db import LocalDB\n", + "from 
adalflow.utils import setup_env\n", + "from adalflow.components.retriever.faiss_retriever import FAISSRetriever\n", + "from adalflow.components.data_process import (\n", + " RetrieverOutputToContextStr,\n", + " ToEmbeddings,\n", + " TextSplitter,\n", + ")\n", + "from adalflow.utils.global_config import get_adalflow_default_root_path" + ], + "metadata": { + "id": "V9LsGDnm3RbV" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "configs = {\n", + " \"embedder\": {\n", + " \"batch_size\": 100,\n", + " \"model_kwargs\": {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 256,\n", + " \"encoding_format\": \"float\",\n", + " },\n", + " },\n", + " \"retriever\": {\n", + " \"top_k\": 5,\n", + " },\n", + " \"generator\": {\n", + " \"model_client\": ModelClientType.OPENAI(),\n", + " \"model_kwargs\": {\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"temperature\": 0.3,\n", + " \"stream\": False,\n", + " },\n", + " },\n", + " \"text_splitter\": {\n", + " \"split_by\": \"word\",\n", + " \"chunk_size\": 400,\n", + " \"chunk_overlap\": 200,\n", + " },\n", + "}" + ], + "metadata": { + "id": "kWGTZxrw3Tli" + }, + "execution_count": 5, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def prepare_data_pipeline():\n", + " splitter = TextSplitter(**configs[\"text_splitter\"])\n", + " embedder = Embedder(\n", + " model_client=ModelClientType.OPENAI(),\n", + " model_kwargs=configs[\"embedder\"][\"model_kwargs\"],\n", + " )\n", + " embedder_transformer = ToEmbeddings(\n", + " embedder=embedder, batch_size=configs[\"embedder\"][\"batch_size\"]\n", + " )\n", + " data_transformer = Sequential(splitter, embedder_transformer)\n", + " return data_transformer\n", + "\n", + "\n", + "def prepare_database_with_index(\n", + " docs: List[Document],\n", + " index_file: str = \"index.faiss\",\n", + " index_path: Optional[str] = None,\n", + "):\n", + " index_path = index_path or get_adalflow_default_root_path()\n", + " index_path = os.path.join(index_path, index_file)\n", + " if os.path.exists(index_path):\n", + " return None\n", + " db = LocalDB()\n", + " db.load(docs)\n", + " data_transformer = prepare_data_pipeline()\n", + " db.transform(data_transformer, key=\"data_transformer\")\n", + " db.save_state(index_path)" + ], + "metadata": { + "id": "1QE0PCKs4BLz" + }, + "execution_count": 6, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "RAG_PROMPT_TEMPLATE = r\"\"\"\n", + "{{task_desc}}\n", + "\n", + "\n", + "{{input_str}}\n", + "{{context_str}}\n", + "\n", + "\"\"\"\n", + "\n", + "rag_prompt_task_desc = r\"\"\"\n", + "You are a helpful assistant.\n", + "\n", + "Your task is to answer the query that may or may not come with context information.\n", + "When context is provided, you should stick to the context and less on your prior knowledge to answer the query.\n", + "\n", + "Output JSON format:\n", + "{\n", + " \"answer\": \"The answer to the query\",\n", + "}\"\"\"\n", + "\n", + "\n", + "class RAG(Component):\n", + " def __init__(\n", + " self,\n", + " index_file: str = \"index.faiss\",\n", + " index_path: Optional[str] = None,\n", + " configs: dict = configs,\n", + " ):\n", + " super().__init__()\n", + "\n", + " index_path = index_path or get_adalflow_default_root_path()\n", + " index_path = os.path.join(index_path, index_file)\n", + " self.index_path = index_path\n", + "\n", + " if not os.path.exists(index_path):\n", + " self.db = LocalDB()\n", + " self.register_data_transformer()\n", + " self.transformed_docs = []\n", + " 
else:\n", + " self.db = LocalDB.load_state(index_path)\n", + " self.transformed_docs = self.db.get_transformed_data(\"data_transformer\")\n", + "\n", + " embedder = Embedder(\n", + " model_client=ModelClientType.OPENAI(),\n", + " model_kwargs=configs[\"embedder\"][\"model_kwargs\"],\n", + " )\n", + "\n", + " self.retriever = FAISSRetriever(\n", + " **configs[\"retriever\"],\n", + " embedder=embedder,\n", + " documents=self.transformed_docs,\n", + " document_map_func=lambda doc: doc.vector,\n", + " )\n", + " self.retriever_output_processors = RetrieverOutputToContextStr(deduplicate=True)\n", + "\n", + " self.generator = Generator(\n", + " **configs[\"generator\"],\n", + " prompt_kwargs={\"task_desc_str\": rag_prompt_task_desc},\n", + " output_processors=JsonParser(),\n", + " )\n", + "\n", + " def register_data_transformer(self):\n", + " if \"data_transformer\" not in self.db.get_transformer_keys():\n", + " data_transformer = prepare_data_pipeline()\n", + " self.db.register_transformer(data_transformer, key=\"data_transformer\")\n", + " print(\"Data transformer registered\")\n", + "\n", + " def add_documents(self, docs: List[Document]):\n", + " self.db.extend(docs, apply_transformer=True)\n", + " self.db.save_state(self.index_path)\n", + "\n", + " def get_transformed_docs(self, filter_func=None):\n", + " return self.db.get_transformed_data(\"data_transformer\", filter_func)\n", + "\n", + " def prepare_retriever(self, filter_func=None):\n", + " self.transformed_docs = self.get_transformed_docs(filter_func)\n", + " self.retriever.build_index_from_documents(\n", + " self.transformed_docs, document_map_func=lambda doc: doc.vector\n", + " )\n", + "\n", + " def generate(self, query: str, context: Optional[str] = None) -> Any:\n", + " if not self.generator:\n", + " raise ValueError(\"Generator is not set\")\n", + " prompt_kwargs = {\"context_str\": context, \"input_str\": query}\n", + " response = self.generator(prompt_kwargs=prompt_kwargs)\n", + " return response, context\n", + "\n", + " def call(self, query: str, verbose: bool = False) -> Any:\n", + " retrieved_documents = self.retriever(query)\n", + " for i, retriever_output in enumerate(retrieved_documents):\n", + " retrieved_documents[i].documents = [\n", + " self.transformed_docs[doc_index]\n", + " for doc_index in retriever_output.doc_indices\n", + " ]\n", + " if verbose:\n", + " print(f\"retrieved_documents: \\n {retrieved_documents}\")\n", + "\n", + " context_str = self.retriever_output_processors(retrieved_documents)\n", + " if verbose:\n", + " print(f\"context_str: \\n {context_str}\")\n", + "\n", + " return self.generate(query, context=context_str)" + ], + "metadata": { + "id": "6Mu1HXhy4DIG" + }, + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "# Prepare initial documents\n", + "doc1 = Document(\n", + " meta_data={\"title\": \"Li Yin's profile\"},\n", + " text=\"My name is Li Yin, I love rock climbing\" + \"lots of nonsense text\" * 500,\n", + " id=\"doc1\",\n", + ")\n", + "doc2 = Document(\n", + " meta_data={\"title\": \"Interviewing Li Yin\"},\n", + " text=\"lots of more nonsense text\" * 250\n", + " + \"Li Yin is an AI researcher and a software engineer\"\n", + " + \"lots of more nonsense text\" * 250,\n", + " id=\"doc2\",\n", + ")\n", + "\n", + "# Prepare the database (only runs once)\n", + "prepare_database_with_index([doc1, doc2], index_file=\"index.faiss\")\n", + "\n", + "# Initialize RAG\n", + "rag = RAG(index_file=\"index.faiss\")\n", + "print(rag)\n", + "\n", + "# Query the RAG 
system\n", + "query = \"What is Li Yin's hobby and profession?\"\n", + "response = rag.call(query)\n", + "print(f\"Response: {response}\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "sPnx4PY34D1j", + "outputId": "f66d6f1a-70bf-40e9-a160-591fcfdcbed3" + }, + "execution_count": 8, + "outputs": [ { - "cell_type": "code", - "source": [ - "def prepare_data_pipeline():\n", - " splitter = TextSplitter(**configs[\"text_splitter\"])\n", - " embedder = Embedder(\n", - " model_client=ModelClientType.OPENAI(),\n", - " model_kwargs=configs[\"embedder\"][\"model_kwargs\"],\n", - " )\n", - " embedder_transformer = ToEmbeddings(\n", - " embedder=embedder, batch_size=configs[\"embedder\"][\"batch_size\"]\n", - " )\n", - " data_transformer = Sequential(splitter, embedder_transformer)\n", - " return data_transformer\n", - "\n", - "def prepare_database_with_index(\n", - " docs: List[Document],\n", - " index_file: str = \"index.faiss\",\n", - " index_path: Optional[str] = None,\n", - "):\n", - " index_path = index_path or get_adalflow_default_root_path()\n", - " index_path = os.path.join(index_path, index_file)\n", - " if os.path.exists(index_path):\n", - " return None\n", - " db = LocalDB()\n", - " db.load(docs)\n", - " data_transformer = prepare_data_pipeline()\n", - " db.transform(data_transformer, key=\"data_transformer\")\n", - " db.save_state(index_path)\n" - ], - "metadata": { - "id": "1QE0PCKs4BLz" - }, - "execution_count": 6, - "outputs": [] + "output_type": "stream", + "name": "stderr", + "text": [ + "Splitting Documents in Batches: 100%|██████████| 1/1 [00:00<00:00, 109.58it/s]\n", + "Batch embedding documents: 100%|██████████| 1/1 [00:01<00:00, 1.33s/it]\n", + "Adding embeddings to documents from batch: 1it [00:00, 6462.72it/s]\n" + ] }, { - "cell_type": "code", - "source": [ - "RAG_PROMPT_TEMPLATE = r\"\"\"\n", - "{{task_desc}}\n", - "\n", - "\n", - "{{input_str}}\n", - "{{context_str}}\n", - "\n", - "\"\"\"\n", - "\n", - "rag_prompt_task_desc = r\"\"\"\n", - "You are a helpful assistant.\n", - "\n", - "Your task is to answer the query that may or may not come with context information.\n", - "When context is provided, you should stick to the context and less on your prior knowledge to answer the query.\n", - "\n", - "Output JSON format:\n", - "{\n", - " \"answer\": \"The answer to the query\",\n", - "}\"\"\"\n", - "\n", - "class RAG(Component):\n", - " def __init__(\n", - " self,\n", - " index_file: str = \"index.faiss\",\n", - " index_path: Optional[str] = None,\n", - " configs: dict = configs,\n", - " ):\n", - " super().__init__()\n", - "\n", - " index_path = index_path or get_adalflow_default_root_path()\n", - " index_path = os.path.join(index_path, index_file)\n", - " self.index_path = index_path\n", - "\n", - " if not os.path.exists(index_path):\n", - " self.db = LocalDB()\n", - " self.register_data_transformer()\n", - " self.transformed_docs = []\n", - " else:\n", - " self.db = LocalDB.load_state(index_path)\n", - " self.transformed_docs = self.db.get_transformed_data(\"data_transformer\")\n", - "\n", - " embedder = Embedder(\n", - " model_client=ModelClientType.OPENAI(),\n", - " model_kwargs=configs[\"embedder\"][\"model_kwargs\"],\n", - " )\n", - "\n", - " self.retriever = FAISSRetriever(\n", - " **configs[\"retriever\"],\n", - " embedder=embedder,\n", - " documents=self.transformed_docs,\n", - " document_map_func=lambda doc: doc.vector,\n", - " )\n", - " self.retriever_output_processors = RetrieverOutputToContextStr(deduplicate=True)\n", - "\n", 
- " self.generator = Generator(\n", - " **configs[\"generator\"],\n", - " prompt_kwargs={\"task_desc_str\": rag_prompt_task_desc},\n", - " output_processors=JsonParser(),\n", - " )\n", - "\n", - " def register_data_transformer(self):\n", - " if \"data_transformer\" not in self.db.get_transformer_keys():\n", - " data_transformer = prepare_data_pipeline()\n", - " self.db.register_transformer(data_transformer, key=\"data_transformer\")\n", - " print(\"Data transformer registered\")\n", - "\n", - " def add_documents(self, docs: List[Document]):\n", - " self.db.extend(docs, apply_transformer=True)\n", - " self.db.save_state(self.index_path)\n", - "\n", - " def get_transformed_docs(self, filter_func=None):\n", - " return self.db.get_transformed_data(\"data_transformer\", filter_func)\n", - "\n", - " def prepare_retriever(self, filter_func=None):\n", - " self.transformed_docs = self.get_transformed_docs(filter_func)\n", - " self.retriever.build_index_from_documents(\n", - " self.transformed_docs, document_map_func=lambda doc: doc.vector\n", - " )\n", - "\n", - " def generate(self, query: str, context: Optional[str] = None) -> Any:\n", - " if not self.generator:\n", - " raise ValueError(\"Generator is not set\")\n", - " prompt_kwargs = {\"context_str\": context, \"input_str\": query}\n", - " response = self.generator(prompt_kwargs=prompt_kwargs)\n", - " return response, context\n", - "\n", - " def call(self, query: str, verbose: bool = False) -> Any:\n", - " retrieved_documents = self.retriever(query)\n", - " for i, retriever_output in enumerate(retrieved_documents):\n", - " retrieved_documents[i].documents = [\n", - " self.transformed_docs[doc_index]\n", - " for doc_index in retriever_output.doc_indices\n", - " ]\n", - " if verbose:\n", - " print(f\"retrieved_documents: \\n {retrieved_documents}\")\n", - "\n", - " context_str = self.retriever_output_processors(retrieved_documents)\n", - " if verbose:\n", - " print(f\"context_str: \\n {context_str}\")\n", - "\n", - " return self.generate(query, context=context_str)\n" - ], - "metadata": { - "id": "6Mu1HXhy4DIG" - }, - "execution_count": 7, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "Saved the state of the DB to /root/.adalflow/index.faiss\n", + "RAG(\n", + " (db): LocalDB(name='LocalDB', items=[Document(id=doc1, text='My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...', meta_data={'title': \"Li Yin's profile\"}, vector=[], parent_doc_id=None, order=None, score=None), Document(id=doc2, text='lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...', meta_data={'title': 'Interviewing Li Yin'}, vector=[], parent_doc_id=None, order=None, score=None)], transformed_items={'data_transformer': [Document(id=59f7f6ad-eb4c-4fdb-8d04-6dba1ee439bc, text='My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=0, score=None), Document(id=2486725e-47ff-4978-84fc-7937778b0e45, text='textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nons...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=1, score=None), Document(id=96993047-4cff-436d-b8ac-e02da4ae7fec, text='nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlot...', meta_data={'title': \"Li Yin's profile\"}, 
vector='len: 256', parent_doc_id=doc1, order=2, score=None), Document(id=77742f90-0c0c-4143-802d-3557577d4935, text='of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense text...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=3, score=None), Document(id=81ba770e-c5f2-4dc5-98fc-349ab9143ef9, text='textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nons...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=4, score=None), Document(id=dff6f5e3-5929-4e3c-ba5f-79f5116c1fa3, text='nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlot...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=5, score=None), Document(id=1e7888e2-0783-40b2-ab85-067e3ba71fad, text='of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense text...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=6, score=None), Document(id=2deb945f-dfb9-46d3-a60b-dae77e2f5fd8, text='lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=0, score=None), Document(id=3d9c21aa-d583-47fe-b143-710b4bc4a8b2, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=1, score=None), Document(id=a318ffea-2542-4493-ab2d-03d10a94e860, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=2, score=None), Document(id=b5c05820-7545-43a8-a4a3-691c5ccc79d1, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=3, score=None), Document(id=a739cd3e-8826-4e74-afa9-499498115621, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=4, score=None), Document(id=7153cde2-b6ee-4485-91e9-9de2f4bd45ab, text='textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsens...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=5, score=None), Document(id=c3f3ed48-acc2-41b5-b4ac-a6107b651789, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=6, score=None), Document(id=7bfd84e6-0025-4cfa-8c0a-63c9de9a8d4a, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=7, score=None), Document(id=8bece98d-65f0-4dd1-9407-d1c54413bef4, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=8, score=None), 
Document(id=cf9ab236-af73-4af6-9302-b3c7ffdd9ca7, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=9, score=None)]}, transformer_setups={'data_transformer': Sequential(\n", + " (0): TextSplitter(split_by=word, chunk_size=400, chunk_overlap=200)\n", + " (1): ToEmbeddings(\n", + " batch_size=100\n", + " (embedder): Embedder(\n", + " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", + " (model_client): OpenAIClient()\n", + " )\n", + " (batch_embedder): BatchEmbedder(\n", + " (embedder): Embedder(\n", + " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", + " (model_client): OpenAIClient()\n", + " )\n", + " )\n", + " )\n", + " )}, mapper_setups={}, index_path='/root/.adalflow/index.faiss')\n", + " (retriever): FAISSRetriever(\n", + " top_k=5, metric=prob, dimensions=256, total_documents=17\n", + " (embedder): Embedder(\n", + " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", + " (model_client): OpenAIClient()\n", + " )\n", + " )\n", + " (retriever_output_processors): RetrieverOutputToContextStr(deduplicate=True)\n", + " (generator): Generator(\n", + " model_kwargs={'model': 'gpt-3.5-turbo', 'temperature': 0.3, 'stream': False}, trainable_prompt_kwargs=[]\n", + " (prompt): Prompt(\n", + " template: \n", + " {# task desc #}\n", + " {% if task_desc_str %}\n", + " {{task_desc_str}}\n", + " {% else %}\n", + " You are a helpful assistant.\n", + " {% endif %}\n", + " {#input format#}\n", + " {% if input_format_str %}\n", + " \n", + " {{input_format_str}}\n", + " \n", + " {% endif %}\n", + " {# output format #}\n", + " {% if output_format_str %}\n", + " \n", + " {{output_format_str}}\n", + " \n", + " {% endif %}\n", + " {# tools #}\n", + " {% if tools_str %}\n", + " \n", + " {{tools_str}}\n", + " \n", + " {% endif %}\n", + " {# example #}\n", + " {% if examples_str %}\n", + " \n", + " {{examples_str}}\n", + " \n", + " {% endif %}\n", + " {# chat history #}\n", + " {% if chat_history_str %}\n", + " \n", + " {{chat_history_str}}\n", + " \n", + " {% endif %}\n", + " {#contex#}\n", + " {% if context_str %}\n", + " \n", + " {{context_str}}\n", + " \n", + " {% endif %}\n", + " \n", + " \n", + " {% if input_str %}\n", + " {{input_str}}\n", + " {% endif %}\n", + " \n", + " {# steps #}\n", + " {% if steps_str %}\n", + " \n", + " {{steps_str}}\n", + " \n", + " {% endif %}\n", + " , prompt_kwargs: {'task_desc_str': '\\nYou are a helpful assistant.\\n\\nYour task is to answer the query that may or may not come with context information.\\nWhen context is provided, you should stick to the context and less on your prior knowledge to answer the query.\\n\\nOutput JSON format:\\n{\\n \"answer\": \"The answer to the query\",\\n}'}, prompt_variables: ['examples_str', 'context_str', 'chat_history_str', 'tools_str', 'task_desc_str', 'input_str', 'input_format_str', 'output_format_str', 'steps_str']\n", + " )\n", + " (model_client): OpenAIClient()\n", + " (output_processors): JsonParser()\n", + " )\n", + ")\n", + "Response: (GeneratorOutput(id=None, data={'answer': \"Li Yin's hobby is rock climbing and profession is an AI researcher and a software engineer.\"}, error=None, usage=CompletionUsage(completion_tokens=25, prompt_tokens=2713, total_tokens=2738), raw_response='{\\n \"answer\": \"Li Yin\\'s hobby is rock climbing 
and profession is an AI researcher and a software engineer.\"\\n}', metadata=None), ' My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of 
more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more 
nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense 
textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense ')\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "# Add more documents at runtime\n", + "doc3 = Document(\n", + " meta_data={\"title\": \"Apple's profile\"},\n", + " text=\"Apple is a cute dog with black and tan fur\" + \"lots of nonsense text\" * 500,\n", + " id=\"doc3\",\n", + ")\n", + "doc4 = Document(\n", + " meta_data={\"title\": \"Apple's characteristics\"},\n", + " text=\"lots of more nonsense text\" * 250\n", + " + \"Apple is energetic, loves to play with her monkey toy\"\n", + " + \"lots of more nonsense text\" * 250,\n", + " 
id=\"doc4\",\n", + ")\n", + "\n", + "rag.add_documents([doc3, doc4])\n", + "rag.prepare_retriever()\n", + "\n", + "# Test a new query\n", + "query = \"What is Apple's favorite toy?\"\n", + "response = rag.call(query)\n", + "print(f\"Response: {response}\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "bcC1-dCheVEC", + "outputId": "133bab3f-ff2e-40db-99dc-71d64af6283f" + }, + "execution_count": 9, + "outputs": [ { - "cell_type": "code", - "source": [ - "# Prepare initial documents\n", - "doc1 = Document(\n", - " meta_data={\"title\": \"Li Yin's profile\"},\n", - " text=\"My name is Li Yin, I love rock climbing\" + \"lots of nonsense text\" * 500,\n", - " id=\"doc1\",\n", - ")\n", - "doc2 = Document(\n", - " meta_data={\"title\": \"Interviewing Li Yin\"},\n", - " text=\"lots of more nonsense text\" * 250\n", - " + \"Li Yin is an AI researcher and a software engineer\"\n", - " + \"lots of more nonsense text\" * 250,\n", - " id=\"doc2\",\n", - ")\n", - "\n", - "# Prepare the database (only runs once)\n", - "prepare_database_with_index([doc1, doc2], index_file=\"index.faiss\")\n", - "\n", - "# Initialize RAG\n", - "rag = RAG(index_file=\"index.faiss\")\n", - "print(rag)\n", - "\n", - "# Query the RAG system\n", - "query = \"What is Li Yin's hobby and profession?\"\n", - "response = rag.call(query)\n", - "print(f\"Response: {response}\")\n" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "sPnx4PY34D1j", - "outputId": "f66d6f1a-70bf-40e9-a160-591fcfdcbed3" - }, - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "name": "stderr", - "text": [ - "Splitting Documents in Batches: 100%|██████████| 1/1 [00:00<00:00, 109.58it/s]\n", - "Batch embedding documents: 100%|██████████| 1/1 [00:01<00:00, 1.33s/it]\n", - "Adding embeddings to documents from batch: 1it [00:00, 6462.72it/s]\n" - ] - }, - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Saved the state of the DB to /root/.adalflow/index.faiss\n", - "RAG(\n", - " (db): LocalDB(name='LocalDB', items=[Document(id=doc1, text='My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...', meta_data={'title': \"Li Yin's profile\"}, vector=[], parent_doc_id=None, order=None, score=None), Document(id=doc2, text='lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...', meta_data={'title': 'Interviewing Li Yin'}, vector=[], parent_doc_id=None, order=None, score=None)], transformed_items={'data_transformer': [Document(id=59f7f6ad-eb4c-4fdb-8d04-6dba1ee439bc, text='My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=0, score=None), Document(id=2486725e-47ff-4978-84fc-7937778b0e45, text='textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nons...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=1, score=None), Document(id=96993047-4cff-436d-b8ac-e02da4ae7fec, text='nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlot...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=2, score=None), Document(id=77742f90-0c0c-4143-802d-3557577d4935, text='of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense text...', 
meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=3, score=None), Document(id=81ba770e-c5f2-4dc5-98fc-349ab9143ef9, text='textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nons...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=4, score=None), Document(id=dff6f5e3-5929-4e3c-ba5f-79f5116c1fa3, text='nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlot...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=5, score=None), Document(id=1e7888e2-0783-40b2-ab85-067e3ba71fad, text='of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense text...', meta_data={'title': \"Li Yin's profile\"}, vector='len: 256', parent_doc_id=doc1, order=6, score=None), Document(id=2deb945f-dfb9-46d3-a60b-dae77e2f5fd8, text='lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=0, score=None), Document(id=3d9c21aa-d583-47fe-b143-710b4bc4a8b2, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=1, score=None), Document(id=a318ffea-2542-4493-ab2d-03d10a94e860, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=2, score=None), Document(id=b5c05820-7545-43a8-a4a3-691c5ccc79d1, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=3, score=None), Document(id=a739cd3e-8826-4e74-afa9-499498115621, text='textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonse...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=4, score=None), Document(id=7153cde2-b6ee-4485-91e9-9de2f4bd45ab, text='textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsens...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=5, score=None), Document(id=c3f3ed48-acc2-41b5-b4ac-a6107b651789, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=6, score=None), Document(id=7bfd84e6-0025-4cfa-8c0a-63c9de9a8d4a, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=7, score=None), Document(id=8bece98d-65f0-4dd1-9407-d1c54413bef4, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, order=8, score=None), Document(id=cf9ab236-af73-4af6-9302-b3c7ffdd9ca7, text='nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of m...', meta_data={'title': 'Interviewing Li Yin'}, vector='len: 256', parent_doc_id=doc2, 
order=9, score=None)]}, transformer_setups={'data_transformer': Sequential(\n", - " (0): TextSplitter(split_by=word, chunk_size=400, chunk_overlap=200)\n", - " (1): ToEmbeddings(\n", - " batch_size=100\n", - " (embedder): Embedder(\n", - " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", - " (model_client): OpenAIClient()\n", - " )\n", - " (batch_embedder): BatchEmbedder(\n", - " (embedder): Embedder(\n", - " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", - " (model_client): OpenAIClient()\n", - " )\n", - " )\n", - " )\n", - " )}, mapper_setups={}, index_path='/root/.adalflow/index.faiss')\n", - " (retriever): FAISSRetriever(\n", - " top_k=5, metric=prob, dimensions=256, total_documents=17\n", - " (embedder): Embedder(\n", - " model_kwargs={'model': 'text-embedding-3-small', 'dimensions': 256, 'encoding_format': 'float'}, \n", - " (model_client): OpenAIClient()\n", - " )\n", - " )\n", - " (retriever_output_processors): RetrieverOutputToContextStr(deduplicate=True)\n", - " (generator): Generator(\n", - " model_kwargs={'model': 'gpt-3.5-turbo', 'temperature': 0.3, 'stream': False}, trainable_prompt_kwargs=[]\n", - " (prompt): Prompt(\n", - " template: \n", - " {# task desc #}\n", - " {% if task_desc_str %}\n", - " {{task_desc_str}}\n", - " {% else %}\n", - " You are a helpful assistant.\n", - " {% endif %}\n", - " {#input format#}\n", - " {% if input_format_str %}\n", - " \n", - " {{input_format_str}}\n", - " \n", - " {% endif %}\n", - " {# output format #}\n", - " {% if output_format_str %}\n", - " \n", - " {{output_format_str}}\n", - " \n", - " {% endif %}\n", - " {# tools #}\n", - " {% if tools_str %}\n", - " \n", - " {{tools_str}}\n", - " \n", - " {% endif %}\n", - " {# example #}\n", - " {% if examples_str %}\n", - " \n", - " {{examples_str}}\n", - " \n", - " {% endif %}\n", - " {# chat history #}\n", - " {% if chat_history_str %}\n", - " \n", - " {{chat_history_str}}\n", - " \n", - " {% endif %}\n", - " {#contex#}\n", - " {% if context_str %}\n", - " \n", - " {{context_str}}\n", - " \n", - " {% endif %}\n", - " \n", - " \n", - " {% if input_str %}\n", - " {{input_str}}\n", - " {% endif %}\n", - " \n", - " {# steps #}\n", - " {% if steps_str %}\n", - " \n", - " {{steps_str}}\n", - " \n", - " {% endif %}\n", - " , prompt_kwargs: {'task_desc_str': '\\nYou are a helpful assistant.\\n\\nYour task is to answer the query that may or may not come with context information.\\nWhen context is provided, you should stick to the context and less on your prior knowledge to answer the query.\\n\\nOutput JSON format:\\n{\\n \"answer\": \"The answer to the query\",\\n}'}, prompt_variables: ['examples_str', 'context_str', 'chat_history_str', 'tools_str', 'task_desc_str', 'input_str', 'input_format_str', 'output_format_str', 'steps_str']\n", - " )\n", - " (model_client): OpenAIClient()\n", - " (output_processors): JsonParser()\n", - " )\n", - ")\n", - "Response: (GeneratorOutput(id=None, data={'answer': \"Li Yin's hobby is rock climbing and profession is an AI researcher and a software engineer.\"}, error=None, usage=CompletionUsage(completion_tokens=25, prompt_tokens=2713, total_tokens=2738), raw_response='{\\n \"answer\": \"Li Yin\\'s hobby is rock climbing and profession is an AI researcher and a software engineer.\"\\n}', metadata=None), ' My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of 
nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more 
nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more textLi Yin is an AI researcher and a software engineerlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more 
nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense nonsense textlots of nonsense textlots of nonsense 
textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense ')\n" - ] - } - ] + "output_type": "stream", + "name": "stderr", + "text": [ + "Splitting Documents in Batches: 100%|██████████| 1/1 [00:00<00:00, 114.76it/s]\n", + "Batch embedding documents: 100%|██████████| 1/1 [00:00<00:00, 1.35it/s]\n", + "Adding embeddings to documents from batch: 1it [00:00, 1915.21it/s]\n" + ] }, { - "cell_type": "code", - "source": [ - "# Add more documents at runtime\n", - "doc3 = Document(\n", - " meta_data={\"title\": \"Apple's profile\"},\n", - " text=\"Apple is a cute dog with black and tan fur\" + \"lots of nonsense text\" * 500,\n", - " id=\"doc3\",\n", - ")\n", - "doc4 = Document(\n", - " meta_data={\"title\": \"Apple's characteristics\"},\n", - " text=\"lots of more nonsense text\" * 250\n", - " + \"Apple is energetic, loves to play with her monkey toy\"\n", 
- " + \"lots of more nonsense text\" * 250,\n", - " id=\"doc4\",\n", - ")\n", - "\n", - "rag.add_documents([doc3, doc4])\n", - "rag.prepare_retriever()\n", - "\n", - "# Test a new query\n", - "query = \"What is Apple's favorite toy?\"\n", - "response = rag.call(query)\n", - "print(f\"Response: {response}\")\n" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "bcC1-dCheVEC", - "outputId": "133bab3f-ff2e-40db-99dc-71d64af6283f" - }, - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "name": "stderr", - "text": [ - "Splitting Documents in Batches: 100%|██████████| 1/1 [00:00<00:00, 114.76it/s]\n", - "Batch embedding documents: 100%|██████████| 1/1 [00:00<00:00, 1.35it/s]\n", - "Adding embeddings to documents from batch: 1it [00:00, 1915.21it/s]\n" - ] - }, - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Saved the state of the DB to /root/.adalflow/index.faiss\n", - "Response: (GeneratorOutput(id=None, data={'answer': \"Apple's favorite toy is her monkey toy.\"}, error=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=2647, total_tokens=2663), raw_response='{\\n \"answer\": \"Apple\\'s favorite toy is her monkey toy.\"\\n}', metadata=None), ' Apple is a cute dog with black and tan furlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense textlots of nonsense 
text [... repeated filler text truncated ...] Apple is energetic, loves to play with her monkey toy [... repeated filler text truncated ...] Li Yin is an AI researcher and a software engineer [... repeated filler text truncated ...] ')\n"
- ]
- }
- ]
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Saved the state of the DB to /root/.adalflow/index.faiss\n",
+ "Response: (GeneratorOutput(id=None, data={'answer': \"Apple's favorite toy is her monkey toy.\"}, error=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=2647, total_tokens=2663), raw_response='{\\n \"answer\": \"Apple\\'s favorite toy is her monkey toy.\"\\n}', metadata=None), ' Apple is a cute dog with black and tan furlots of nonsense text [... repeated filler text truncated ...] Apple is energetic, loves to play with her monkey toy [... repeated filler text truncated ...] Li Yin is an AI researcher and a software engineer [... repeated filler text truncated ...] ')\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# View all documents in the database\n",
+ "print(\"All documents in the database:\")\n",
+ "for item in rag.db.items:\n",
+ "    print(\n",
+ "        f\"ID: {item.id}, Title: {item.meta_data['title']}, Text: {item.text[:100]}...\"\n",
+ "    )"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
 },
+ "id": "o9TzVv5GeZZ2",
+ "outputId": "bde56355-186c-4013-d702-b4530f82881b"
+ }, + "execution_count": 10, + "outputs": [ { - "cell_type": "code", - "source": [ - "# View all documents in the database\n", - "print(\"All documents in the database:\")\n", - "for item in rag.db.items:\n", - " print(f\"ID: {item.id}, Title: {item.meta_data['title']}, Text: {item.text[:100]}...\")\n" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "o9TzVv5GeZZ2", - "outputId": "bde56355-186c-4013-d702-b4530f82881b" - }, - "execution_count": 10, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "All documents in the database:\n", - "ID: doc1, Title: Li Yin's profile, Text: My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...\n", - "ID: doc2, Title: Interviewing Li Yin, Text: lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...\n", - "ID: doc3, Title: Apple's profile, Text: Apple is a cute dog with black and tan furlots of nonsense textlots of nonsense textlots of nonsense...\n", - "ID: doc4, Title: Apple's characteristics, Text: lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...\n" - ] - } - ] + "output_type": "stream", + "name": "stdout", + "text": [ + "All documents in the database:\n", + "ID: doc1, Title: Li Yin's profile, Text: My name is Li Yin, I love rock climbinglots of nonsense textlots of nonsense textlots of nonsense te...\n", + "ID: doc2, Title: Interviewing Li Yin, Text: lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...\n", + "ID: doc3, Title: Apple's profile, Text: Apple is a cute dog with black and tan furlots of nonsense textlots of nonsense textlots of nonsense...\n", + "ID: doc4, Title: Apple's characteristics, Text: lots of more nonsense textlots of more nonsense textlots of more nonsense textlots of more nonsense ...\n" + ] } - ] + ] + } + ] } diff --git a/notebooks/tutorials/adalflow_text_splitter.ipynb b/notebooks/tutorials/adalflow_text_splitter.ipynb index 66fb81c7..4008f45a 100644 --- a/notebooks/tutorials/adalflow_text_splitter.ipynb +++ b/notebooks/tutorials/adalflow_text_splitter.ipynb @@ -31,7 +31,7 @@ "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", "\n", "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", "\n", "print(\"API keys have been set.\")" ] @@ -76,16 +76,11 @@ "from adalflow.core.types import Document\n", "\n", "# Configure the splitter settings\n", - "text_splitter = TextSplitter(\n", - " split_by=\"word\",\n", - " chunk_size=5,\n", - " chunk_overlap=1\n", - ")\n", + "text_splitter = TextSplitter(split_by=\"word\", chunk_size=5, chunk_overlap=1)\n", "\n", "# Example document\n", "doc = Document(\n", - " text=\"Example text. More example text. Even more text to illustrate.\",\n", - " id=\"doc1\"\n", + " text=\"Example text. More example text. Even more text to illustrate.\", id=\"doc1\"\n", ")\n", "\n", "# Execute the splitting\n", @@ -135,18 +130,13 @@ "from adalflow.core.types import Document\n", "\n", "# Configure the splitter settings\n", - "text_splitter = TextSplitter(\n", - " split_by=\"token\",\n", - " chunk_size=5,\n", - " chunk_overlap=0\n", - ")\n", + "text_splitter = TextSplitter(split_by=\"token\", chunk_size=5, chunk_overlap=0)\n", "\n", "doc = Document(\n", - " text=\"Example text. More example text. 
Even more text to illustrate.\",\n", - " id = \"doc1\"\n", - " )\n", + " text=\"Example text. More example text. Even more text to illustrate.\", id=\"doc1\"\n", + ")\n", "\n", - "splitted_docs = (text_splitter.call(documents=[doc]))\n", + "splitted_docs = text_splitter.call(documents=[doc])\n", "\n", "for doc in splitted_docs:\n", " print(doc)" diff --git a/notebooks/tutorials/adalflow_tracing.ipynb b/notebooks/tutorials/adalflow_tracing.ipynb index 014c1b5e..ef3d2b25 100644 --- a/notebooks/tutorials/adalflow_tracing.ipynb +++ b/notebooks/tutorials/adalflow_tracing.ipynb @@ -1,183 +1,184 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "provenance": [] - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] }, - "cells": [ - { - "cell_type": "markdown", - "source": [ - "# Tracing\n", - "\n", - "In particular, we provide two tracing methods to help you develop and improve the Generator:\n", - "\n", - "1. Trace the history change(states) on prompt during your development process. Developers typically go through a long process of prompt optimization and it is frustrating to lose track of the prompt changes when your current change actually makes the performance much worse.\n" - ], - "metadata": { - "id": "lLGpv1fLLIjF" - } - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "id": "sfKEfaYC3Go7" - }, - "outputs": [], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "!pip install -U adalflow[openai,groq,faiss-cpu]\n", - "\n", - "clear_output()\n" - ] - }, - { - "cell_type": "code", - "source": [ - "import os\n", - "from getpass import getpass\n", - "\n", - "# Prompt user to enter their API keys securely\n", - "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", - "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", - "\n", - "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", - "os.environ['GROQ_API_KEY'] = groq_api_key\n", - "\n", - "print(\"API keys have been set.\")\n" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "-4c_AGBt3PlR", - "outputId": "85aba038-ee9c-463d-bdbd-027cbfff0094" - }, - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Please enter your OpenAI API key: ··········\n", - "Please enter your GROQ API key: ··········\n", - "API keys have been set.\n" - ] - } - ] - }, - { - "cell_type": "markdown", - "source": [ - "We created a GeneratorStateLogger to handle the logging and saving into json files. To further simplify developers’s process, we provides a class decorator trace_generator_states where a single line of code can be added to any of your task component. It will automatically track any attributes of type Generator." 
- ], - "metadata": { - "id": "yWi2uEiE6UIf" - } - }, - { - "cell_type": "code", - "source": [ - "from adalflow.tracing import trace_generator_states\n", - "from adalflow.core import Component, Generator\n", - "import adalflow as adal\n", - "from adalflow.components.model_client import OpenAIClient\n", - "\n", - "template_doc = r\"\"\" You are a doctor User: {{input_str}}\"\"\"\n", - "\n", - "@trace_generator_states()\n", - "class DocQA(adal.Component):\n", - " def __init__(self):\n", - " super(DocQA, self).__init__()\n", - " self.generator = Generator(\n", - " template=template_doc,\n", - " model_client=OpenAIClient(),\n", - " model_kwargs={\"model\": \"gpt-4o-mini\"},\n", - " )\n", - "\n", - " def call(self, query: str) -> str:\n", - " return self.doc(prompt_kwargs={\"input_str\": query}).data\n" - ], - "metadata": { - "id": "qk9pkcCVzdek" - }, - "execution_count": 13, - "outputs": [] - }, - { - "cell_type": "markdown", - "source": [ - "Here is the folder structer of where the trace is generated as a .json file and also an example output below" - ], - "metadata": { - "id": "LAZUSnYn-lnI" - } - }, - { - "cell_type": "markdown", - "source": [ - "![image.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAj4AAADGCAYAAADSbIrxAAAMTGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnltSIQQIREBK6E0QkRJASggtgPQiiEpIAoQSY0JQsaOLCq5dRLCiqyAuuroCstiwK4ti74sFBWVdLNiVNyGALvvK9+b75s5//znzzzln5pYBgN7Ol0pzUE0AciV5sphgf9aEpGQWqROoAV3ABAZgFF8gl3KiosIBLIPt38vb6wBRtlcclFr/7P+vRUsokgsAQKIgThPKBbkQ/woA3iSQyvIAIEohbz49T6rEayHWkUEHIa5S4gwVblLiNBW+1G8TF8OF+DEAZHU+X5YBgEYP5Fn5ggyoQ4fRAieJUCyB2A9in9zcqUKI50NsA23gnHSlPjvtO52Mv2mmDWny+RlDWBVLfyEHiOXSHP7M/zMd/7vk5igG57CGVT1TFhKjjBnm7XH21DAlVof4vSQtIhJibQBQXCzst1diZqYiJF5lj9oI5FyYM7jOAB0nz4nlDfAxQn5AGMSGEKdLciLCB2wK08VBShuYP7RMnMeLg1gP4iqRPDB2wOaYbGrM4LzX02VczgDfyZf1+6DU/6rIjueo9DHtTBFvQB9zLMiMS4SYCnFAvjghAmINiCPk2bFhAzYpBZnciEEbmSJGGYsFxDKRJNhfpY+VpsuCYgbsd+fKB2PHjmWKeRED+HJeZlyIKlfYYwG/338YC9YjknDiB3VE8gnhg7EIRQGBqthxskgSH6vicT1pnn+MaixuJ82JGrDH/UU5wUreDOI4eX7s4Nj8PLg5Vfp4kTQvKk7lJ16exQ+NUvmD7wPhgAsCAAsoYE0DU0EWELd213fDO1VPEOADGcgAIuAwwAyOSOzvkcBrLCgAf0IkAvKhcf79vSKQD/kvw1glJx7iVFcHkD7Qp1TJBk8gzgVhIAfeK/qVJEMeJIDHkBH/wyM+rAIYQw6syv5/zw+y3xgOZMIHGMXgjCz6oCUxkBhADCEGEW1xA9wH98LD4dUPVmecjXsMxvHNnvCE0EZ4SLhGaCfcmiIulA3zcjxoh/pBA/lJ+z4/uBXUdMX9cW+oDpVxJm4AHHAXOA8H94Uzu0KWO+C3MiusYdp/i+C7FRqwozhRUMoIih/FZvhIDTsN1yEVZa6/z4/K17ShfHOHeobPz/0u+0LYhg23xJZgB7Az2HHsHNaE1QMWdhRrwFqww0o8tOMe9++4wdli+v3JhjrD98y3lVVmUu5U49Tl9FnVlyeakad8GLlTpTNl4ozMPBYHfjFELJ5E4DiK5ezk7AKA8vujer29ju7/riDMlm/cwj8A8D7a19f32zcu9CgAv7jDV8Khb5wNG35a1AA4e0igkOWrOFx5IcA3Bx0+ffrAGJgDGxiPM3ADXsAPBIJQEAniQBKYDL3PhPtcBqaD2WABKAIlYCVYB8rBFrAdVIGfwX5QD5rAcXAaXACXwDVwB+6eDvAc9IC34BOCICSEhjAQfcQEsUTsEWeEjfgggUg4EoMkIalIBiJBFMhsZCFSgqxGypFtSDXyC3IIOY6cQ9qQW8gDpAt5hXxEMVQd1UGNUCt0NMpGOWgYGodOQjPQaWgBughdjpahlegetA49jl5Ar6Ht6HO0FwOYGsbETDEHjI1xsUgsGUvHZNhcrBgrxSqxWqwRrvMVrB3rxj7gRJyBs3AHuIND8HhcgE/D5+LL8HK8Cq/DT+JX8Ad4D/6VQCMYEuwJngQeYQIhgzCdUEQoJewkHCScgs9SB+EtkUhkEq2J7vBZTCJmEWcRlxE3EfcSjxHbiI+IvSQSSZ9kT/ImRZL4pDxSEWkDaQ/pKOkyqYP0nqxGNiE7k4PIyWQJuZBcSt5NPkK+TH5K/kTRpFhSPCmRFCFlJmUFZQelkXKR0kH5RNWiWlO9qXHULOoCahm1lnqKepf6Wk1NzUzNQy1aTaw2X61MbZ/aWbUHah/UtdXt1LnqKeoK9eXqu9SPqd9Sf02j0axofrRkWh5tOa2adoJ2n/Zeg6HhqMHTEGrM06jQqNO4rPGCTqFb0jn0yfQCein9AP0ivVuTommlydXka87VrNA8pHlDs1eLoTVGK1IrV2uZ1m6tc1qd2iRtK+1AbaH2Iu3t2ie0HzEwhjmDyxAwFjJ2ME4xOnSIOtY6PJ0snRKdn3VadXp0tXVddBN0Z+hW6B7WbWdiTCsmj5nDXMHcz7zO/DjCaARnhGjE0hG1Iy6PeKc3Us9PT6RXrLdX75reR32WfqB+tv4q/Xr9ewa4gZ1BtMF0g80Gpwy6R+qM9BopGFk8cv/I24aooZ1hjOEsw+2GLYa9RsZGwUZSow1GJ4y6jZnGfsZZxmuNjxh3m
ACFQfAQmf6rtnqrEIiIAIiIAIiECZBCR8ygSnbCIgAiIgAiIgAtVHQMKn+u6ZaiwCIiACIiACIlAmAQmfMsEpmwiIgAiIgAiIQPURkPCpvnumGouACIiACIiACJRJQMKnTHDKJgIiIAIiIAIiUH0EJHyq756pxiIgAiIgAiIgAmUSkPApE5yyiYAIiIAIiIAIVB+B/wO9N/2l2KPKEwAAAABJRU5ErkJggg==)" - ], - "metadata": { - "id": "cVofNXVW-EMo" - } + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# Tracing\n", + "\n", + "In particular, we provide two tracing methods to help you develop and improve the Generator:\n", + "\n", + "1. Trace the history change(states) on prompt during your development process. Developers typically go through a long process of prompt optimization and it is frustrating to lose track of the prompt changes when your current change actually makes the performance much worse.\n" + ], + "metadata": { + "id": "lLGpv1fLLIjF" + } + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "sfKEfaYC3Go7" + }, + "outputs": [], + "source": [ + "from IPython.display import clear_output\n", + "\n", + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", + "\n", + "clear_output()" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "# Prompt user to enter their API keys securely\n", + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", + "groq_api_key = getpass(\"Please enter your GROQ API key: \")\n", + "\n", + "# Set environment variables\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", + "os.environ[\"GROQ_API_KEY\"] = groq_api_key\n", + "\n", + "print(\"API keys have been set.\")" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "-4c_AGBt3PlR", + "outputId": "85aba038-ee9c-463d-bdbd-027cbfff0094" + }, + "execution_count": 2, + "outputs": [ { - "cell_type": "code", - "source": [ - "'''\n", - " {\n", - " \"doc\": [\n", - " {\n", - " \"prompt_states\": {\n", - " \"type\": \"Prompt\",\n", - " \"data\": {\n", - " \"_components\": {\n", - " \"_ordered_dict\": true,\n", - " \"data\": []\n", - " },\n", - " \"_parameters\": {\n", - " \"_ordered_dict\": true,\n", - " \"data\": []\n", - " },\n", - " \"training\": false,\n", - " \"teacher_mode\": false,\n", - " \"tracing\": false,\n", - " \"name\": \"Prompt\",\n", - " \"_init_args\": {\n", - " \"template\": null,\n", - " \"prompt_kwargs\": {}\n", - " },\n", - " \"template\": \" You are a doctor User: {{input_str}}\",\n", - " \"prompt_variables\": [\n", - " \"input_str\"\n", - " ],\n", - " \"prompt_kwargs\": {}\n", - " }\n", - " },\n", - " \"time_stamp\": \"2024-11-29T12:36:33.302956\"\n", - " }\n", - " ]\n", - "}\n", - "'''" - ], - "metadata": { - "id": "dPd9i6_t7ERJ" - }, - "execution_count": null, - "outputs": [] + "output_type": "stream", + "name": "stdout", + "text": [ + "Please enter your OpenAI API key: ··········\n", + "Please enter your GROQ API key: ··········\n", + "API keys have been set.\n" + ] } - ] + ] + }, + { + "cell_type": "markdown", + "source": [ + "We created a GeneratorStateLogger to handle the logging and saving into json files. To further simplify developers’s process, we provides a class decorator trace_generator_states where a single line of code can be added to any of your task component. It will automatically track any attributes of type Generator." 
+ ], + "metadata": { + "id": "yWi2uEiE6UIf" + } + }, + { + "cell_type": "code", + "source": [ + "from adalflow.tracing import trace_generator_states\n", + "from adalflow.core import Component, Generator\n", + "import adalflow as adal\n", + "from adalflow.components.model_client import OpenAIClient\n", + "\n", + "template_doc = r\"\"\" You are a doctor User: {{input_str}}\"\"\"\n", + "\n", + "\n", + "@trace_generator_states()\n", + "class DocQA(adal.Component):\n", + " def __init__(self):\n", + " super(DocQA, self).__init__()\n", + " self.generator = Generator(\n", + " template=template_doc,\n", + " model_client=OpenAIClient(),\n", + " model_kwargs={\"model\": \"gpt-4o-mini\"},\n", + " )\n", + "\n", + " def call(self, query: str) -> str:\n", + " return self.doc(prompt_kwargs={\"input_str\": query}).data" + ], + "metadata": { + "id": "qk9pkcCVzdek" + }, + "execution_count": 13, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "Here is the folder structer of where the trace is generated as a .json file and also an example output below" + ], + "metadata": { + "id": "LAZUSnYn-lnI" + } + }, + { + "cell_type": "markdown", + "source": [ + "![image.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAj4AAADGCAYAAADSbIrxAAAMTGlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnltSIQQIREBK6E0QkRJASggtgPQiiEpIAoQSY0JQsaOLCq5dRLCiqyAuuroCstiwK4ti74sFBWVdLNiVNyGALvvK9+b75s5//znzzzln5pYBgN7Ol0pzUE0AciV5sphgf9aEpGQWqROoAV3ABAZgFF8gl3KiosIBLIPt38vb6wBRtlcclFr/7P+vRUsokgsAQKIgThPKBbkQ/woA3iSQyvIAIEohbz49T6rEayHWkUEHIa5S4gwVblLiNBW+1G8TF8OF+DEAZHU+X5YBgEYP5Fn5ggyoQ4fRAieJUCyB2A9in9zcqUKI50NsA23gnHSlPjvtO52Mv2mmDWny+RlDWBVLfyEHiOXSHP7M/zMd/7vk5igG57CGVT1TFhKjjBnm7XH21DAlVof4vSQtIhJibQBQXCzst1diZqYiJF5lj9oI5FyYM7jOAB0nz4nlDfAxQn5AGMSGEKdLciLCB2wK08VBShuYP7RMnMeLg1gP4iqRPDB2wOaYbGrM4LzX02VczgDfyZf1+6DU/6rIjueo9DHtTBFvQB9zLMiMS4SYCnFAvjghAmINiCPk2bFhAzYpBZnciEEbmSJGGYsFxDKRJNhfpY+VpsuCYgbsd+fKB2PHjmWKeRED+HJeZlyIKlfYYwG/338YC9YjknDiB3VE8gnhg7EIRQGBqthxskgSH6vicT1pnn+MaixuJ82JGrDH/UU5wUreDOI4eX7s4Nj8PLg5Vfp4kTQvKk7lJ16exQ+NUvmD7wPhgAsCAAsoYE0DU0EWELd213fDO1VPEOADGcgAIuAwwAyOSOzvkcBrLCgAf0IkAvKhcf79vSKQD/kvw1glJx7iVFcHkD7Qp1TJBk8gzgVhIAfeK/qVJEMeJIDHkBH/wyM+rAIYQw6syv5/zw+y3xgOZMIHGMXgjCz6oCUxkBhADCEGEW1xA9wH98LD4dUPVmecjXsMxvHNnvCE0EZ4SLhGaCfcmiIulA3zcjxoh/pBA/lJ+z4/uBXUdMX9cW+oDpVxJm4AHHAXOA8H94Uzu0KWO+C3MiusYdp/i+C7FRqwozhRUMoIih/FZvhIDTsN1yEVZa6/z4/K17ShfHOHeobPz/0u+0LYhg23xJZgB7Az2HHsHNaE1QMWdhRrwFqww0o8tOMe9++4wdli+v3JhjrD98y3lVVmUu5U49Tl9FnVlyeakad8GLlTpTNl4ozMPBYHfjFELJ5E4DiK5ezk7AKA8vujer29ju7/riDMlm/cwj8A8D7a19f32zcu9CgAv7jDV8Khb5wNG35a1AA4e0igkOWrOFx5IcA3Bx0+ffrAGJgDGxiPM3ADXsAPBIJQEAniQBKYDL3PhPtcBqaD2WABKAIlYCVYB8rBFrAdVIGfwX5QD5rAcXAaXACXwDVwB+6eDvAc9IC34BOCICSEhjAQfcQEsUTsEWeEjfgggUg4EoMkIalIBiJBFMhsZCFSgqxGypFtSDXyC3IIOY6cQ9qQW8gDpAt5hXxEMVQd1UGNUCt0NMpGOWgYGodOQjPQaWgBughdjpahlegetA49jl5Ar6Ht6HO0FwOYGsbETDEHjI1xsUgsGUvHZNhcrBgrxSqxWqwRrvMVrB3rxj7gRJyBs3AHuIND8HhcgE/D5+LL8HK8Cq/DT+JX8Ad4D/6VQCMYEuwJngQeYQIhgzCdUEQoJewkHCScgs9SB+EtkUhkEq2J7vBZTCJmEWcRlxE3EfcSjxHbiI+IvSQSSZ9kT/ImRZL4pDxSEWkDaQ/pKOkyqYP0nqxGNiE7k4PIyWQJuZBcSt5NPkK+TH5K/kTRpFhSPCmRFCFlJmUFZQelkXKR0kH5RNWiWlO9qXHULOoCahm1lnqKepf6Wk1NzUzNQy1aTaw2X61MbZ/aWbUHah/UtdXt1LnqKeoK9eXqu9SPqd9Sf02j0axofrRkWh5tOa2adoJ2n/Zeg6HhqMHTEGrM06jQqNO4rPGCTqFb0jn0yfQCein9AP0ivVuTommlydXka87VrNA8pHlDs1eLoTVGK1IrV2uZ1m6tc1qd2iRtK+1AbaH2Iu3t2ie0HzEwhjmDyxAwFjJ2ME4xOnSIOtY6PJ0snRKdn3VadXp0tXVddBN0Z+hW6B7WbWdiTCsmj5nDXMHcz7zO/DjCaARnhGjE0hG1Iy6PeKc3Us9PT6RXrLdX75reR32WfqB+tv4q/Xr9ewa4gZ1BtMF0g80Gpwy6R+qM9BopGFk8cv/I24aooZ1hjOEsw+2GLYa9RsZGwUZSow1GJ4y6jZnGfsZZxmu
IiACIiACFQfAQmf6rtnqrEIiIAIiIAIiECZBCR8ygSnbCIgAiIgAiIgAtVHQMKn+u6ZaiwCIiACIiACIlAmAQmfMsEpmwiIgAiIgAiIQPURkPCpvnumGouACIiACIiACJRJQMKnTHDKJgIiIAIiIAIiUH0EJHyq756pxiIgAiIgAiIgAmUSkPApE5yyiYAIiIAIiIAIVB+B/wO9N/2l2KPKEwAAAABJRU5ErkJggg==)" + ], + "metadata": { + "id": "cVofNXVW-EMo" + } + }, + { + "cell_type": "code", + "source": [ + "\"\"\"\n", + " {\n", + " \"doc\": [\n", + " {\n", + " \"prompt_states\": {\n", + " \"type\": \"Prompt\",\n", + " \"data\": {\n", + " \"_components\": {\n", + " \"_ordered_dict\": true,\n", + " \"data\": []\n", + " },\n", + " \"_parameters\": {\n", + " \"_ordered_dict\": true,\n", + " \"data\": []\n", + " },\n", + " \"training\": false,\n", + " \"teacher_mode\": false,\n", + " \"tracing\": false,\n", + " \"name\": \"Prompt\",\n", + " \"_init_args\": {\n", + " \"template\": null,\n", + " \"prompt_kwargs\": {}\n", + " },\n", + " \"template\": \" You are a doctor User: {{input_str}}\",\n", + " \"prompt_variables\": [\n", + " \"input_str\"\n", + " ],\n", + " \"prompt_kwargs\": {}\n", + " }\n", + " },\n", + " \"time_stamp\": \"2024-11-29T12:36:33.302956\"\n", + " }\n", + " ]\n", + "}\n", + "\"\"\"" + ], + "metadata": { + "id": "dPd9i6_t7ERJ" + }, + "execution_count": null, + "outputs": [] + } + ] } diff --git a/tutorials/database.ipynb b/tutorials/database.ipynb index 8744b1b0..2db749f7 100644 --- a/tutorials/database.ipynb +++ b/tutorials/database.ipynb @@ -15,26 +15,26 @@ "outputs": [], "source": [ "# setup data needed for the notes\n", - "query_1 = \"What are the benefits of renewable energy?\" # gt is [0, 3]\n", - "query_2 = \"How do solar panels impact the environment?\" # gt is [1, 2]\n", + "query_1 = \"What are the benefits of renewable energy?\" # gt is [0, 3]\n", + "query_2 = \"How do solar panels impact the environment?\" # gt is [1, 2]\n", "\n", - "org_documents =[\n", + "org_documents = [\n", " {\n", " \"title\": \"The Impact of Renewable Energy on the Economy\",\n", - " \"content\": \"Renewable energy technologies not only help in reducing greenhouse gas emissions but also contribute significantly to the economy by creating jobs in the manufacturing and installation sectors. The growth in renewable energy usage boosts local economies through increased investment in technology and infrastructure.\"\n", + " \"content\": \"Renewable energy technologies not only help in reducing greenhouse gas emissions but also contribute significantly to the economy by creating jobs in the manufacturing and installation sectors. The growth in renewable energy usage boosts local economies through increased investment in technology and infrastructure.\",\n", " },\n", " {\n", " \"title\": \"Understanding Solar Panels\",\n", - " \"content\": \"Solar panels convert sunlight into electricity by allowing photons, or light particles, to knock electrons free from atoms, generating a flow of electricity. Solar panels are a type of renewable energy technology that has been found to have a significant positive effect on the environment by reducing the reliance on fossil fuels.\"\n", + " \"content\": \"Solar panels convert sunlight into electricity by allowing photons, or light particles, to knock electrons free from atoms, generating a flow of electricity. 
Solar panels are a type of renewable energy technology that has been found to have a significant positive effect on the environment by reducing the reliance on fossil fuels.\",\n", " },\n", " {\n", " \"title\": \"Pros and Cons of Solar Energy\",\n", - " \"content\": \"While solar energy offers substantial environmental benefits, such as reducing carbon footprints and pollution, it also has downsides. The production of solar panels can lead to hazardous waste, and large solar farms require significant land, which can disrupt local ecosystems.\"\n", + " \"content\": \"While solar energy offers substantial environmental benefits, such as reducing carbon footprints and pollution, it also has downsides. The production of solar panels can lead to hazardous waste, and large solar farms require significant land, which can disrupt local ecosystems.\",\n", " },\n", " {\n", - " \"title\": \"Renewable Energy and Its Effects\",\n", - " \"content\": \"Renewable energy sources like wind, solar, and hydro power play a crucial role in combating climate change. They do not produce greenhouse gases during operation, making them essential for sustainable development. However, the initial setup and material sourcing for these technologies can still have environmental impacts.\"\n", - " }\n", + " \"title\": \"Renewable Energy and Its Effects\",\n", + " \"content\": \"Renewable energy sources like wind, solar, and hydro power play a crucial role in combating climate change. They do not produce greenhouse gases during operation, making them essential for sustainable development. However, the initial setup and material sourcing for these technologies can still have environmental impacts.\",\n", + " },\n", "]\n", "\n", "turns = [\n", @@ -42,14 +42,14 @@ " \"user\": \"What are the benefits of renewable energy?\",\n", " \"system\": \"I can see you are interested in renewable energy. Renewable energy technologies not only help in reducing greenhouse gas emissions but also contribute significantly to the economy by creating jobs in the manufacturing and installation sectors. The growth in renewable energy usage boosts local economies through increased investment in technology and infrastructure.\",\n", " \"user_time\": \"2021-09-01T12:00:00Z\",\n", - " \"system_time\": \"2021-09-01T12:00:01Z\"\n", + " \"system_time\": \"2021-09-01T12:00:01Z\",\n", " },\n", " {\n", " \"user\": \"How do solar panels impact the environment?\",\n", " \"system\": \"Solar panels convert sunlight into electricity by allowing photons, or light particles, to knock electrons free from atoms, generating a flow of electricity. 
Solar panels are a type of renewable energy technology that has been found to have a significant positive effect on the environment by reducing the reliance on fossil fuels.\",\n", " \"user_time\": \"2021-09-01T12:00:02Z\",\n", - " \"system_time\": \"2021-09-01T12:00:03Z\"\n", - " }\n", + " \"system_time\": \"2021-09-01T12:00:03Z\",\n", + " },\n", "]" ] }, @@ -78,7 +78,10 @@ "from adalflow.core.types import Document\n", "\n", "# we will save the content to text and title in the meta_data\n", - "documents = [Document(text=doc['content'], meta_data={'title': doc['title']}) for doc in org_documents]\n", + "documents = [\n", + " Document(text=doc[\"content\"], meta_data={\"title\": doc[\"title\"]})\n", + " for doc in org_documents\n", + "]\n", "print(documents)" ] }, @@ -151,7 +154,7 @@ "source": [ "# prepare the data pipeline\n", "\n", - "from adalflow.core.embedder import Embedder \n", + "from adalflow.core.embedder import Embedder\n", "from adalflow.core.types import ModelClientType\n", "from adalflow.components.data_process import DocumentSplitter, ToEmbeddings\n", "from adalflow.core.component import Sequential\n", @@ -163,14 +166,10 @@ " \"encoding_format\": \"float\",\n", "}\n", "\n", - "splitter_config = {\n", - " \"split_by\": \"word\",\n", - " \"split_length\": 50,\n", - " \"split_overlap\": 10\n", - "}\n", + "splitter_config = {\"split_by\": \"word\", \"split_length\": 50, \"split_overlap\": 10}\n", "\n", "splitter = DocumentSplitter(**splitter_config)\n", - "embedder = Embedder(model_client =ModelClientType.OPENAI(), model_kwargs=model_kwargs)\n", + "embedder = Embedder(model_client=ModelClientType.OPENAI(), model_kwargs=model_kwargs)\n", "embedder_transformer = ToEmbeddings(embedder, batch_size=2)\n", "data_transformer = Sequential(splitter, embedder_transformer)\n", "print(data_transformer)" @@ -185,13 +184,19 @@ "# prepare mapping functions to map the data to Document object for the pipeline\n", "\n", "from typing import Dict\n", + "\n", + "\n", "# mapping function for org_documents\n", "def map_to_document(doc: Dict) -> Document:\n", - " return Document(text=doc['content'], meta_data={'title': doc['title']})\n", + " return Document(text=doc[\"content\"], meta_data={\"title\": doc[\"title\"]})\n", + "\n", "\n", "def map_dialogturn_to_document(turn: DialogTurn) -> Document:\n", " # it can be important to keep the original data's id\n", - " return Document(id=turn.id, text=turn.user_query.query_str + ' ' + turn.assistant_response.response_str)" + " return Document(\n", + " id=turn.id,\n", + " text=turn.user_query.query_str + \" \" + turn.assistant_response.response_str,\n", + " )" ] }, { @@ -304,7 +309,7 @@ "# create a db for the dialog_turns\n", "from adalflow.core.db import LocalDB\n", "\n", - "dialog_turn_db = LocalDB('dialog_turns')\n", + "dialog_turn_db = LocalDB(\"dialog_turns\")\n", "print(dialog_turn_db)\n", "\n", "dialog_turn_db.load(dialog_turns)\n", @@ -397,7 +402,7 @@ ], "source": [ "# save the state of the dialog_turn_db\n", - "dialog_turn_db.save_state('dialog_turn_db_state.pkl')\n", + "dialog_turn_db.save_state(\"dialog_turn_db_state.pkl\")\n", "\n", "print(dialog_turn_db)" ] @@ -432,8 +437,8 @@ ], "source": [ "# restore the state of the restored_dialog_turn_db\n", - "restored_dialog_turn_db = LocalDB.load_state('dialog_turn_db_state.pkl')\n", - "print(restored_dialog_turn_db)\n" + "restored_dialog_turn_db = LocalDB.load_state(\"dialog_turn_db_state.pkl\")\n", + "print(restored_dialog_turn_db)" ] }, { @@ -537,18 +542,15 @@ } ], "source": [ - "# prepare the generator 
for the dialog turns \n", + "# prepare the generator for the dialog turns\n", "\n", "from adalflow.core import Generator\n", "\n", - "llm_kwargs = {\n", - " \"model\": \"gpt-3.5-turbo\"\n", - "}\n", + "llm_kwargs = {\"model\": \"gpt-3.5-turbo\"}\n", "\n", "# we will use the default prompt, and using input_str and chat_history_str for the final prompt\n", - "generator = Generator(model_client = ModelClientType.OPENAI(), model_kwargs=llm_kwargs)\n", - "print(generator)\n", - "\n" + "generator = Generator(model_client=ModelClientType.OPENAI(), model_kwargs=llm_kwargs)\n", + "print(generator)" ] }, { @@ -614,16 +616,18 @@ ], "source": [ "# lets see how the prompt will be if we pass the input_str and chat_history_str\n", - "input_str = \"What are the benefits of renewable energy? Did I ask this before?\" \n", + "input_str = \"What are the benefits of renewable energy? Did I ask this before?\"\n", + "\n", "\n", "def format_chat_history_str(turns: list) -> str:\n", " chat_history_str = []\n", " for turn in turns:\n", - " chat_history_str.append(turn.to_yaml()) # format as yaml\n", + " chat_history_str.append(turn.to_yaml()) # format as yaml\n", " # join with newline\n", - " chat_history_str = '\\n_________\\n'.join(chat_history_str)\n", + " chat_history_str = \"\\n_________\\n\".join(chat_history_str)\n", " return chat_history_str\n", "\n", + "\n", "chat_history_str = format_chat_history_str(dialog_turns)\n", "print(generator.print_prompt(input_str=input_str, chat_history_str=chat_history_str))" ] @@ -663,31 +667,33 @@ } ], "source": [ - "# as we have quite a bit of empty fields, lets exclude them \n", + "# as we have quite a bit of empty fields, lets exclude them\n", "from typing import List\n", "\n", - "input_str = \"What are the benefits of renewable energy? Did I ask this before?\" \n", + "input_str = \"What are the benefits of renewable energy? 
Did I ask this before?\"\n", + "\n", "\n", "def format_chat_history_str(turns: List[DialogTurn]) -> str:\n", " chat_history_str = []\n", " for turn in turns:\n", " chat_history_str.append(\n", - " turn.to_yaml(\n", - " exclude=[\n", - " \"id\",\n", - " \"user_id\",\n", - " \"session_id\",\n", - " \"user_query_timestamp\",\n", - " \"assistant_response_timestamp\",\n", - " \"order\",\n", - " \"metadata\",\n", - " \"vector\",\n", - " ],\n", - " )\n", - " ) \n", - " chat_history_str = '\\n_________\\n'.join(chat_history_str)\n", + " turn.to_yaml(\n", + " exclude=[\n", + " \"id\",\n", + " \"user_id\",\n", + " \"session_id\",\n", + " \"user_query_timestamp\",\n", + " \"assistant_response_timestamp\",\n", + " \"order\",\n", + " \"metadata\",\n", + " \"vector\",\n", + " ],\n", + " )\n", + " )\n", + " chat_history_str = \"\\n_________\\n\".join(chat_history_str)\n", " return chat_history_str\n", "\n", + "\n", "chat_history_str = format_chat_history_str(dialog_turn_db.items[0:1])\n", "print(generator.print_prompt(input_str=input_str, chat_history_str=chat_history_str))" ] @@ -861,6 +867,7 @@ "source": [ "# we will use the retriever to find top_k chunked documents, from its partent_doc_id, we will find the initial dialog_turn, and feed that to the generator\n", "from adalflow.utils.logger import get_logger\n", + "\n", "get_logger()\n", "\n", "embeddings = [item.vector for item in dialog_turn_db.transformed_items[key]]\n", @@ -868,7 +875,7 @@ "retriever.build_index_from_documents(documents=embeddings)\n", "\n", "# top_k_documents = retriever(input=input_str)\n", - "# print(top_k_documents)\n" + "# print(top_k_documents)" ] }, { @@ -907,7 +914,12 @@ "source": [ "# get the parent_doc_id from the top_k_documents\n", "\n", - "parent_doc_ids = set([dialog_turn_db.transformed_items[key][doc_index].parent_doc_id for doc_index in top_k_documents[0].doc_indices])\n", + "parent_doc_ids = set(\n", + " [\n", + " dialog_turn_db.transformed_items[key][doc_index].parent_doc_id\n", + " for doc_index in top_k_documents[0].doc_indices\n", + " ]\n", + ")\n", "print(parent_doc_ids)" ] }, @@ -917,7 +929,9 @@ "metadata": {}, "outputs": [], "source": [ - "fetched_dialog_turns=dialog_turn_db.fetch_items(condition=lambda x: x.id in parent_doc_ids)" + "fetched_dialog_turns = dialog_turn_db.fetch_items(\n", + " condition=lambda x: x.id in parent_doc_ids\n", + ")" ] }, { @@ -941,7 +955,9 @@ "source": [ "chat_history_str = format_chat_history_str(fetched_dialog_turns)\n", "\n", - "output = generator(prompt_kwargs={\"input_str\": input_str, \"chat_history_str\": chat_history_str})\n", + "output = generator(\n", + " prompt_kwargs={\"input_str\": input_str, \"chat_history_str\": chat_history_str}\n", + ")\n", "print(output)" ] } diff --git a/tutorials/dataclass.ipynb b/tutorials/dataclass.ipynb index e2631c2b..06be8204 100644 --- a/tutorials/dataclass.ipynb +++ b/tutorials/dataclass.ipynb @@ -8,23 +8,23 @@ "source": [ "from dataclasses import dataclass, field\n", "\n", + "\n", "@dataclass\n", "class Question:\n", - " question: str = field(\n", - " metadata={\"desc\": \"The question asked by the user\"}\n", - " )\n", + " question: str = field(metadata={\"desc\": \"The question asked by the user\"})\n", " metadata: dict = field(\n", " metadata={\"desc\": \"The metadata of the question\"}, default_factory=dict\n", " )\n", "\n", + "\n", "@dataclass\n", "class TrecData:\n", " question: Question = field(\n", " metadata={\"desc\": \"The question asked by the user\"}\n", - " ) # Required field, you have to provide the question field at the 
instantiation\n", + " ) # Required field, you have to provide the question field at the instantiation\n", " label: int = field(\n", " metadata={\"desc\": \"The label of the question\"}, default=0\n", - " ) # Optional field" + " ) # Optional field" ] }, { @@ -49,6 +49,7 @@ "print(example)\n", "\n", "from dataclasses import asdict\n", + "\n", "print(asdict(example))\n", "reconstructed = TrecData(**asdict(example))\n", "print(reconstructed)\n", @@ -97,22 +98,24 @@ "metadata": {}, "outputs": [], "source": [ - "# lets see what DataClass can do \n", + "# lets see what DataClass can do\n", "# 1. allow required field after optional field using required_field on default_factory\n", "\n", "from adalflow.core import DataClass, required_field\n", "\n", + "\n", "@dataclass\n", "class TrecData2(DataClass):\n", " question: Question = field(\n", " metadata={\"desc\": \"The question asked by the user\"}\n", - " ) # Required field, you have to provide the question field at the instantiation\n", + " ) # Required field, you have to provide the question field at the instantiation\n", " label: int = field(\n", " metadata={\"desc\": \"The label of the question\"}, default=0\n", - " ) # Optional field\n", + " ) # Optional field\n", " metadata: dict = field(\n", - " metadata={\"desc\": \"The metadata of the question\"}, default_factory=required_field()\n", - " ) # required field" + " metadata={\"desc\": \"The metadata of the question\"},\n", + " default_factory=required_field(),\n", + " ) # required field" ] }, { @@ -140,10 +143,10 @@ "example = TrecData2(Question(\"What is the capital of France?\"), 1, {\"key\": \"value\"})\n", "print(example)\n", "\n", - "dict_example = TrecData2.to_dict(example) # use as if its a class method\n", + "dict_example = TrecData2.to_dict(example) # use as if its a class method\n", "print(dict_example)\n", "\n", - "dict_example_2 = example.to_dict() # use it as instance method\n", + "dict_example_2 = example.to_dict() # use it as instance method\n", "print(dict_example)\n", "\n", "reconstructed = TrecData2.from_dict(dict_example)\n", @@ -178,7 +181,9 @@ "print(dict_exclude)\n", "\n", "# exclude field of the parent and child class\n", - "dict_exclude = example.to_dict(exclude={\"TrecData2\": [\"metadata\"], \"Question\": [\"metadata\"]})\n", + "dict_exclude = example.to_dict(\n", + " exclude={\"TrecData2\": [\"metadata\"], \"Question\": [\"metadata\"]}\n", + ")\n", "print(dict_exclude)" ] }, @@ -271,8 +276,7 @@ "print(example_str)\n", "\n", "example_str = example.format_example_str(DataClassFormatType.EXAMPLE_YAML)\n", - "print(example_str)\n", - "\n" + "print(example_str)" ] }, { @@ -298,7 +302,7 @@ ], "source": [ "# Now, lets check the data format using class method without instance\n", - "# schema, you can choose to only use properties \n", + "# schema, you can choose to only use properties\n", "\n", "schema = TrecData2.to_schema()\n", "schema" @@ -326,7 +330,9 @@ ], "source": [ "# schema with exclude\n", - "schema_exclude = TrecData2.to_schema(exclude={\"TrecData2\": [\"metadata\"], \"Question\": [\"metadata\"]})\n", + "schema_exclude = TrecData2.to_schema(\n", + " exclude={\"TrecData2\": [\"metadata\"], \"Question\": [\"metadata\"]}\n", + ")\n", "schema_exclude" ] }, @@ -373,7 +379,9 @@ "source": [ "# exclude field of the parent and child class\n", "\n", - "json_signature_exclude = TrecData2.to_json_signature(exclude={\"TrecData2\": [\"metadata\"], \"Question\": [\"metadata\"]})\n", + "json_signature_exclude = TrecData2.to_json_signature(\n", + " exclude={\"TrecData2\": 
[\"metadata\"], \"Question\": [\"metadata\"]}\n", + ")\n", "print(json_signature_exclude)" ] }, @@ -464,7 +472,7 @@ } ], "source": [ - "# use the DataClassFormatType to control it \n", + "# use the DataClassFormatType to control it\n", "\n", "from adalflow.core import DataClassFormatType\n", "\n", @@ -494,6 +502,8 @@ "source": [ "# load with customizd from dict\n", "from typing import Dict\n", + "\n", + "\n", "@dataclass\n", "class OutputFormat(DataClass):\n", " thought: str = field(\n", @@ -521,6 +531,7 @@ " }\n", " return super().from_dict(data)\n", "\n", + "\n", "data = OutputFormat.from_dict({\"coarse_label\": 1})\n", "print(data)" ] diff --git a/tutorials/embedder.ipynb b/tutorials/embedder.ipynb index 29625454..c5c60527 100644 --- a/tutorials/embedder.ipynb +++ b/tutorials/embedder.ipynb @@ -148,6 +148,7 @@ "source": [ "from adalflow.core.embedder import Embedder\n", "from adalflow.components.model_client import TransformersClient\n", + "\n", "# from adalflow.utils import enable_library_logging\n", "\n", "# enable_library_logging(level=\"DEBUG\")\n", @@ -508,8 +509,10 @@ "from typing import List\n", "from adalflow.core.component import Component\n", "from copy import deepcopy\n", + "\n", + "\n", "class DecreaseEmbeddingDim(Component):\n", - " def __init__(self, old_dim: int, new_dim: int, normalize: bool = True):\n", + " def __init__(self, old_dim: int, new_dim: int, normalize: bool = True):\n", " super().__init__()\n", " self.old_dim = old_dim\n", " self.new_dim = new_dim\n", @@ -525,7 +528,7 @@ " new_embedding = normalize_vector(new_embedding)\n", " embedding.embedding = new_embedding\n", " return output\n", - " \n", + "\n", " def _extra_repr(self) -> str:\n", " repr_str = f\"old_dim={self.old_dim}, new_dim={self.new_dim}, normalize={self.normalize}\"\n", " return repr_str" diff --git a/tutorials/generator.ipynb b/tutorials/generator.ipynb index e8a3fac2..bc93b021 100644 --- a/tutorials/generator.ipynb +++ b/tutorials/generator.ipynb @@ -49,10 +49,10 @@ "\n", "enable_library_logging(level=\"DEBUG\")\n", "\n", - "model_kwargs={\n", + "model_kwargs = {\n", " \"model\": \"gpt-3.5-turbo\",\n", " \"logprobs\": True,\n", - " \"n\": 2, # the number of chat completion choices\n", + " \"n\": 2, # the number of chat completion choices\n", "}\n", "model_client = OpenAIClient(chat_completion_parser=get_probabilities)\n", "generator = Generator(model_client=model_client, model_kwargs=model_kwargs)\n", @@ -80,7 +80,7 @@ "source": [ "from adalflow.core import Component, Generator\n", "from adalflow.components.model_client import GroqAPIClient\n", - "from adalflow.utils import setup_env # noqa\n", + "from adalflow.utils import setup_env # noqa\n", "\n", "\n", "class SimpleQA(Component):\n", @@ -93,7 +93,9 @@ " You:\n", " \"\"\"\n", " self.generator = Generator(\n", - " model_client=GroqAPIClient(), model_kwargs={\"model\": \"llama3-8b-8192\"}, template=template\n", + " model_client=GroqAPIClient(),\n", + " model_kwargs={\"model\": \"llama3-8b-8192\"},\n", + " template=template,\n", " )\n", "\n", " def call(self, query):\n", diff --git a/tutorials/model_client.ipynb b/tutorials/model_client.ipynb index 3e5b7b06..b61e7ec7 100644 --- a/tutorials/model_client.ipynb +++ b/tutorials/model_client.ipynb @@ -36,9 +36,9 @@ "\n", "prompt = f\"User: {query}\\n\"\n", "model_kwargs = {\"model\": \"gpt-3.5-turbo\", \"temperature\": 0.5, \"max_tokens\": 100}\n", - "api_kwargs = openai_client.convert_inputs_to_api_kwargs(input=prompt, \n", - " model_kwargs=model_kwargs, \n", - " model_type=model_type)\n", + 
"api_kwargs = openai_client.convert_inputs_to_api_kwargs(\n", + " input=prompt, model_kwargs=model_kwargs, model_type=model_type\n", + ")\n", "print(f\"api_kwargs: {api_kwargs}\")\n", "\n", "response = openai_client.call(api_kwargs=api_kwargs, model_type=model_type)\n", @@ -49,15 +49,20 @@ "model_type = ModelType.EMBEDDER\n", "# do batch embedding\n", "input = [query] * 2\n", - "model_kwargs = {\"model\": \"text-embedding-3-small\", \"dimensions\": 8, \"encoding_format\": \"float\"}\n", - "api_kwargs = openai_client.convert_inputs_to_api_kwargs(input=input, model_kwargs=model_kwargs, model_type=model_type)\n", + "model_kwargs = {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 8,\n", + " \"encoding_format\": \"float\",\n", + "}\n", + "api_kwargs = openai_client.convert_inputs_to_api_kwargs(\n", + " input=input, model_kwargs=model_kwargs, model_type=model_type\n", + ")\n", "print(f\"api_kwargs: {api_kwargs}\")\n", "\n", "\n", - "\n", "response = openai_client.call(api_kwargs=api_kwargs, model_type=model_type)\n", "reponse_embedder_output = openai_client.parse_embedding_response(response)\n", - "print(f\"reponse_embedder_output: {reponse_embedder_output}\")\n" + "print(f\"reponse_embedder_output: {reponse_embedder_output}\")" ] }, { diff --git a/tutorials/rag.ipynb b/tutorials/rag.ipynb index 8892f0a2..b5163e51 100644 --- a/tutorials/rag.ipynb +++ b/tutorials/rag.ipynb @@ -16,11 +16,12 @@ "outputs": [], "source": [ "# the data pipeline and the backend data processing\n", - "from adalflow.core.embedder import Embedder \n", + "from adalflow.core.embedder import Embedder\n", "from adalflow.core.types import ModelClientType\n", "from adalflow.components.data_process import TextSplitter, ToEmbeddings\n", "from adalflow.core.container import Sequential\n", "\n", + "\n", "def prepare_data_pipeline():\n", " model_kwargs = {\n", " \"model\": \"text-embedding-3-small\",\n", @@ -28,14 +29,12 @@ " \"encoding_format\": \"float\",\n", " }\n", "\n", - " splitter_config = {\n", - " \"split_by\": \"word\",\n", - " \"split_length\": 50,\n", - " \"split_overlap\": 10\n", - " }\n", + " splitter_config = {\"split_by\": \"word\", \"split_length\": 50, \"split_overlap\": 10}\n", "\n", " splitter = TextSplitter(**splitter_config)\n", - " embedder = Embedder(model_client =ModelClientType.OPENAI(), model_kwargs=model_kwargs)\n", + " embedder = Embedder(\n", + " model_client=ModelClientType.OPENAI(), model_kwargs=model_kwargs\n", + " )\n", " embedder_transformer = ToEmbeddings(embedder, batch_size=2)\n", " data_transformer = Sequential(splitter, embedder_transformer)\n", " print(data_transformer)" diff --git a/tutorials/react_note.ipynb b/tutorials/react_note.ipynb index 0b647a4b..b1cc8bba 100644 --- a/tutorials/react_note.ipynb +++ b/tutorials/react_note.ipynb @@ -120,7 +120,7 @@ " test_react_agent(ModelClientType.OPENAI(), gpt_model_kwargs)\n", " print(\"Done\")\n", "\n", - " test_react_agent_use_examples(ModelClientType.GROQ(), llama3_model_kwargs)\n" + " test_react_agent_use_examples(ModelClientType.GROQ(), llama3_model_kwargs)" ] }, { @@ -134,6 +134,7 @@ "import asyncio\n", "import time\n", "\n", + "\n", "def is_running_in_event_loop() -> bool:\n", " try:\n", " loop = asyncio.get_running_loop()\n", @@ -143,7 +144,8 @@ " return False\n", " except RuntimeError:\n", " return False\n", - " \n", + "\n", + "\n", "def sync_func():\n", " time.sleep(1)\n", " print(\"Sync function\")\n", diff --git a/tutorials/retriever.ipynb b/tutorials/retriever.ipynb index c464f46b..859a6de8 100644 --- 
a/tutorials/retriever.ipynb +++ b/tutorials/retriever.ipynb @@ -23,26 +23,26 @@ "outputs": [], "source": [ "# decide a meaningful query and a list of documents\n", - "query_1 = \"What are the benefits of renewable energy?\" # gt is [0, 3]\n", - "query_2 = \"How do solar panels impact the environment?\" # gt is [1, 2]\n", + "query_1 = \"What are the benefits of renewable energy?\" # gt is [0, 3]\n", + "query_2 = \"How do solar panels impact the environment?\" # gt is [1, 2]\n", "\n", - "documents =[\n", + "documents = [\n", " {\n", " \"title\": \"The Impact of Renewable Energy on the Economy\",\n", - " \"content\": \"Renewable energy technologies not only help in reducing greenhouse gas emissions but also contribute significantly to the economy by creating jobs in the manufacturing and installation sectors. The growth in renewable energy usage boosts local economies through increased investment in technology and infrastructure.\"\n", + " \"content\": \"Renewable energy technologies not only help in reducing greenhouse gas emissions but also contribute significantly to the economy by creating jobs in the manufacturing and installation sectors. The growth in renewable energy usage boosts local economies through increased investment in technology and infrastructure.\",\n", " },\n", " {\n", " \"title\": \"Understanding Solar Panels\",\n", - " \"content\": \"Solar panels convert sunlight into electricity by allowing photons, or light particles, to knock electrons free from atoms, generating a flow of electricity. Solar panels are a type of renewable energy technology that has been found to have a significant positive effect on the environment by reducing the reliance on fossil fuels.\"\n", + " \"content\": \"Solar panels convert sunlight into electricity by allowing photons, or light particles, to knock electrons free from atoms, generating a flow of electricity. Solar panels are a type of renewable energy technology that has been found to have a significant positive effect on the environment by reducing the reliance on fossil fuels.\",\n", " },\n", " {\n", " \"title\": \"Pros and Cons of Solar Energy\",\n", - " \"content\": \"While solar energy offers substantial environmental benefits, such as reducing carbon footprints and pollution, it also has downsides. The production of solar panels can lead to hazardous waste, and large solar farms require significant land, which can disrupt local ecosystems.\"\n", + " \"content\": \"While solar energy offers substantial environmental benefits, such as reducing carbon footprints and pollution, it also has downsides. The production of solar panels can lead to hazardous waste, and large solar farms require significant land, which can disrupt local ecosystems.\",\n", " },\n", " {\n", - " \"title\": \"Renewable Energy and Its Effects\",\n", - " \"content\": \"Renewable energy sources like wind, solar, and hydro power play a crucial role in combating climate change. They do not produce greenhouse gases during operation, making them essential for sustainable development. However, the initial setup and material sourcing for these technologies can still have environmental impacts.\"\n", - " }\n", + " \"title\": \"Renewable Energy and Its Effects\",\n", + " \"content\": \"Renewable energy sources like wind, solar, and hydro power play a crucial role in combating climate change. They do not produce greenhouse gases during operation, making them essential for sustainable development. 
However, the initial setup and material sourcing for these technologies can still have environmental impacts.\",\n", + " },\n", "]" ] }, @@ -67,7 +67,7 @@ ], "source": [ "# create an embedder\n", - "from adalflow.core.embedder import Embedder \n", + "from adalflow.core.embedder import Embedder\n", "from adalflow.core.types import ModelClientType\n", "\n", "\n", @@ -77,7 +77,7 @@ " \"encoding_format\": \"float\",\n", "}\n", "\n", - "embedder = Embedder(model_client =ModelClientType.OPENAI(), model_kwargs=model_kwargs)\n", + "embedder = Embedder(model_client=ModelClientType.OPENAI(), model_kwargs=model_kwargs)\n", "embedder" ] }, @@ -128,7 +128,7 @@ "\n", "from adalflow.components.retriever import FAISSRetriever\n", "\n", - "# pass the documents in the initialization \n", + "# pass the documents in the initialization\n", "documents_embeddings = [x.embedding for x in output.data]\n", "retriever = FAISSRetriever(top_k=2, embedder=embedder, documents=documents_embeddings)\n", "retriever" @@ -153,7 +153,7 @@ "# execute the retriever\n", "output_1 = retriever(input=query_1)\n", "output_2 = retriever(input=query_2)\n", - "output_3 = retriever(input = [query_1, query_2])\n", + "output_3 = retriever(input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -198,7 +198,7 @@ "\n", "output_1 = retriever_1(input=query_1)\n", "output_2 = retriever_1(input=query_2)\n", - "output_3 = retriever_1(input = [query_1, query_2])\n", + "output_3 = retriever_1(input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -229,7 +229,9 @@ "\n", "document_map_func = lambda x: x[\"content\"]\n", "\n", - "bm25_retriever = BM25Retriever(top_k=2, documents=documents, document_map_func=document_map_func)\n", + "bm25_retriever = BM25Retriever(\n", + " top_k=2, documents=documents, document_map_func=document_map_func\n", + ")\n", "print(bm25_retriever)" ] }, @@ -250,7 +252,10 @@ "source": [ "# show how a word splitter and a token splitter differs\n", "\n", - "from adalflow.components.retriever.bm25_retriever import split_text_by_word_fn_then_lower_tokenized, split_text_by_word_fn\n", + "from adalflow.components.retriever.bm25_retriever import (\n", + " split_text_by_word_fn_then_lower_tokenized,\n", + " split_text_by_word_fn,\n", + ")\n", "\n", "query_1_words = split_text_by_word_fn(query_1)\n", "query_1_tokens = split_text_by_word_fn_then_lower_tokenized(query_1)\n", @@ -277,7 +282,7 @@ "source": [ "output_1 = bm25_retriever(input=query_1)\n", "output_2 = bm25_retriever(input=query_2)\n", - "output_3 = bm25_retriever(input = [query_1, query_2])\n", + "output_3 = bm25_retriever(input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -324,7 +329,7 @@ "\n", "output_1 = bm25_retriever(input=query_1_short)\n", "output_2 = bm25_retriever(input=query_2_short)\n", - "output_3 = bm25_retriever(input = [query_1_short, query_2_short])\n", + "output_3 = bm25_retriever(input=[query_1_short, query_2_short])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -351,11 +356,13 @@ "document_map_func = lambda x: x[\"title\"] + \" \" + x[\"content\"]\n", "\n", "print(documents)\n", - "bm25_retriever.build_index_from_documents(documents=documents, document_map_func=document_map_func)\n", + "bm25_retriever.build_index_from_documents(\n", + " documents=documents, document_map_func=document_map_func\n", + ")\n", "\n", "output_1 = bm25_retriever(input=query_1_short)\n", "output_2 = bm25_retriever(input=query_2_short)\n", - 
"output_3 = bm25_retriever(input = [query_1_short, query_2_short])\n", + "output_3 = bm25_retriever(input=[query_1_short, query_2_short])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -425,7 +432,9 @@ "source": [ "# build index and run queries\n", "document_map_func = lambda x: x[\"content\"]\n", - "reranker.build_index_from_documents(documents=documents, document_map_func=document_map_func)\n", + "reranker.build_index_from_documents(\n", + " documents=documents, document_map_func=document_map_func\n", + ")\n", "\n", "print(reranker)" ] @@ -449,7 +458,7 @@ "# run queries\n", "output_1 = reranker(input=query_1)\n", "output_2 = reranker(input=query_2)\n", - "output_3 = reranker(input = [query_1, query_2])\n", + "output_3 = reranker(input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -484,7 +493,7 @@ " documents=documents,\n", " document_map_func=document_map_func,\n", ")\n", - "print(reranker)\n" + "print(reranker)" ] }, { @@ -495,6 +504,7 @@ "source": [ "# run queries\n", "import torch\n", + "\n", "# Set the number of threads for PyTorch, avoid segementation fault\n", "torch.set_num_threads(1)\n", "torch.set_num_interop_threads(1)" @@ -516,11 +526,9 @@ } ], "source": [ - "\n", - "\n", "output_1 = reranker(input=query_1)\n", "output_2 = reranker(input=query_2)\n", - "output_3 = reranker(input = [query_1, query_2])\n", + "output_3 = reranker(input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -551,17 +559,19 @@ ], "source": [ "# try to use title this time\n", - "document_map_func = lambda x: x[\"title\"] + \" \" + x[\"content\"] # no \n", + "document_map_func = lambda x: x[\"title\"] + \" \" + x[\"content\"] # no\n", "\n", - "reranker.build_index_from_documents(documents=documents, document_map_func=document_map_func)\n", + "reranker.build_index_from_documents(\n", + " documents=documents, document_map_func=document_map_func\n", + ")\n", "\n", "# run queries\n", "output_1 = reranker(input=query_1)\n", "output_2 = reranker(input=query_2)\n", - "output_3 = reranker(input = [query_1, query_2])\n", + "output_3 = reranker(input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", - "print(output_3)\n" + "print(output_3)" ] }, { @@ -617,12 +627,12 @@ "}\n", "document_map_func = lambda x: x[\"content\"]\n", "llm_retriever = LLMRetriever(\n", - " top_k=2, \n", - " model_client=model_client, \n", - " model_kwargs=model_kwargs, \n", - " documents=documents, \n", - " document_map_func=document_map_func\n", - " )\n", + " top_k=2,\n", + " model_client=model_client,\n", + " model_kwargs=model_kwargs,\n", + " documents=documents,\n", + " document_map_func=document_map_func,\n", + ")\n", "print(llm_retriever)" ] }, @@ -645,7 +655,7 @@ "# run queries\n", "output_1 = llm_retriever(input=query_1)\n", "output_2 = llm_retriever(input=query_2)\n", - "output_3 = llm_retriever(input = [query_1, query_2])\n", + "output_3 = llm_retriever(input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -674,7 +684,7 @@ "}\n", "output_1 = llm_retriever(model_kwargs=model_kwargs, input=query_1)\n", "output_2 = llm_retriever(model_kwargs=model_kwargs, input=query_2)\n", - "output_3 = llm_retriever(model_kwargs=model_kwargs, input = [query_1, query_2])\n", + "output_3 = llm_retriever(model_kwargs=model_kwargs, input=[query_1, query_2])\n", "print(output_1)\n", "print(output_2)\n", "print(output_3)" @@ -739,12 +749,14 @@ "import fsspec\n", "import os\n", "import time\n", + "\n", + 
"\n", "def get_local_file_metadata(file_path: str):\n", " stat = os.stat(file_path)\n", " return {\n", - " 'size': stat.st_size, # File size in bytes\n", - " 'creation_date': time.ctime(stat.st_ctime), # Creation time\n", - " 'last_modified_date': time.ctime(stat.st_mtime) # Last modification time\n", + " \"size\": stat.st_size, # File size in bytes\n", + " \"creation_date\": time.ctime(stat.st_ctime), # Creation time\n", + " \"last_modified_date\": time.ctime(stat.st_mtime), # Last modification time\n", " }\n", "\n", "\n", @@ -774,9 +786,9 @@ " Returns:\n", " str: The content of the text file.\n", " \"\"\"\n", - " with fsspec.open(file_path, 'r') as file:\n", + " with fsspec.open(file_path, \"r\") as file:\n", " content = file.read()\n", - " return content\n" + " return content" ] }, { @@ -804,8 +816,8 @@ } ], "source": [ - "text = load_text_file('paul_graham/paul_graham_essay.txt')\n", - "file_metadata = get_local_file_metadata('paul_graham/paul_graham_essay.txt')\n", + "text = load_text_file(\"paul_graham/paul_graham_essay.txt\")\n", + "file_metadata = get_local_file_metadata(\"paul_graham/paul_graham_essay.txt\")\n", "print(text[:1000])\n", "print(file_metadata)" ] @@ -839,9 +851,12 @@ "from adalflow.core.types import Document\n", "\n", "# sentence splitting is confusing, the length needs to be smaller\n", - "metadata = {\"title\": \"Paul Graham's essay\", \"path\": \"data/paul_graham/paul_graham_essay.txt\"}\n", + "metadata = {\n", + " \"title\": \"Paul Graham's essay\",\n", + " \"path\": \"data/paul_graham/paul_graham_essay.txt\",\n", + "}\n", "metadata.update(file_metadata)\n", - "documents = [Document(text = text, meta_data = metadata)]\n", + "documents = [Document(text=text, meta_data=metadata)]\n", "splitter = DocumentSplitter(split_by=\"word\", split_length=800, split_overlap=200)\n", "\n", "print(documents)\n", @@ -925,7 +940,7 @@ ], "source": [ "# split the document\n", - "splitted_documents = splitter(documents = documents)\n", + "splitted_documents = splitter(documents=documents)\n", "print(splitted_documents[0], len(splitted_documents))" ] }, @@ -1303,15 +1318,20 @@ "\n", "# 1. 
set up the tracing for failed call as the retriever has generator attribute\n", "\n", + "\n", "@trace_generator_call(save_dir=\"tutorials/traces\")\n", "class LoggedLLMRetriever(LLMRetriever):\n", " pass\n", + "\n", + "\n", "top_k = 2\n", "retriever = LoggedLLMRetriever(\n", - " top_k = top_k, model_client=OpenAIClient(), model_kwargs={\"model\": \"gpt-3.5-turbo\"}\n", + " top_k=top_k, model_client=OpenAIClient(), model_kwargs={\"model\": \"gpt-3.5-turbo\"}\n", ")\n", "\n", - "retriever.build_index_from_documents(documents=[doc.text for doc in splitted_documents[0:16]])\n", + "retriever.build_index_from_documents(\n", + " documents=[doc.text for doc in splitted_documents[0:16]]\n", + ")\n", "\n", "print(retriever)\n", "retriever.generator.print_prompt()" @@ -1373,7 +1393,9 @@ "source": [ "# output[0].documents = [splitted_documents[idx] for idx in output[0].doc_indices]\n", "for per_query_output in output:\n", - " per_query_output.documents = [splitted_documents[idx] for idx in per_query_output.doc_indices]\n", + " per_query_output.documents = [\n", + " splitted_documents[idx] for idx in per_query_output.doc_indices\n", + " ]\n", "print(\"output.documents\", output[0].documents)\n", "len(output)" ] @@ -1537,51 +1559,51 @@ "source": [ "# create data transformer\n", "data_transformer_config = { # attribute and its config to recreate the component\n", - " \"embedder\":{\n", - " \"component_name\": \"Embedder\",\n", - " \"component_config\": {\n", - " \"model_client\": {\n", - " \"component_name\": \"OpenAIClient\",\n", - " \"component_config\": {},\n", - " },\n", - " \"model_kwargs\": {\n", - " \"model\": \"text-embedding-3-small\",\n", - " \"dimensions\": 256,\n", - " \"encoding_format\": \"float\",\n", - " },\n", + " \"embedder\": {\n", + " \"component_name\": \"Embedder\",\n", + " \"component_config\": {\n", + " \"model_client\": {\n", + " \"component_name\": \"OpenAIClient\",\n", + " \"component_config\": {},\n", " },\n", - " },\n", - " \"document_splitter\": {\n", - " \"component_name\": \"DocumentSplitter\",\n", - " \"component_config\": {\n", - " \"split_by\": \"word\",\n", - " \"split_length\": 400,\n", - " \"split_overlap\": 200,\n", + " \"model_kwargs\": {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 256,\n", + " \"encoding_format\": \"float\",\n", " },\n", " },\n", - " \"to_embeddings\": {\n", - " \"component_name\": \"ToEmbeddings\",\n", - " \"component_config\": {\n", - " \"vectorizer\": {\n", - " \"component_name\": \"Embedder\",\n", - " \"component_config\": {\n", - " \"model_client\": {\n", - " \"component_name\": \"OpenAIClient\",\n", - " \"component_config\": {},\n", - " },\n", - " \"model_kwargs\": {\n", - " \"model\": \"text-embedding-3-small\",\n", - " \"dimensions\": 256,\n", - " \"encoding_format\": \"float\",\n", - " },\n", + " },\n", + " \"document_splitter\": {\n", + " \"component_name\": \"DocumentSplitter\",\n", + " \"component_config\": {\n", + " \"split_by\": \"word\",\n", + " \"split_length\": 400,\n", + " \"split_overlap\": 200,\n", + " },\n", + " },\n", + " \"to_embeddings\": {\n", + " \"component_name\": \"ToEmbeddings\",\n", + " \"component_config\": {\n", + " \"vectorizer\": {\n", + " \"component_name\": \"Embedder\",\n", + " \"component_config\": {\n", + " \"model_client\": {\n", + " \"component_name\": \"OpenAIClient\",\n", + " \"component_config\": {},\n", + " },\n", + " \"model_kwargs\": {\n", + " \"model\": \"text-embedding-3-small\",\n", + " \"dimensions\": 256,\n", + " \"encoding_format\": \"float\",\n", " },\n", - " # the 
other config is to instantiate the entity (class and function) with the given config as arguments\n", - " # \"entity_state\": \"storage/embedder.pkl\", # this will load back the state of the entity\n", " },\n", - " \"batch_size\": 100,\n", + " # the other config is to instantiate the entity (class and function) with the given config as arguments\n", + " # \"entity_state\": \"storage/embedder.pkl\", # this will load back the state of the entity\n", " },\n", + " \"batch_size\": 100,\n", " },\n", - " }" + " },\n", + "}" ] }, { @@ -1653,7 +1675,9 @@ "source": [ "from adalflow.core.component import Sequential\n", "\n", - "data_transformer = Sequential(components[\"document_splitter\"], components[\"to_embeddings\"])\n", + "data_transformer = Sequential(\n", + " components[\"document_splitter\"], components[\"to_embeddings\"]\n", + ")\n", "data_transformer" ] }, @@ -1861,7 +1885,7 @@ "source": [ "# test using only the document splitter\n", "text_split = components[\"document_splitter\"](documents)\n", - "print(text_split)\n" + "print(text_split)" ] }, { @@ -2132,7 +2156,7 @@ } ], "source": [ - "# check the length of all documents,text \n", + "# check the length of all documents,text\n", "lengths = set([doc.estimated_num_tokens for doc in documents])\n", "print(lengths)" ] @@ -2155,7 +2179,7 @@ "for doc in documents:\n", " if len(doc.vector) != 256:\n", " print(doc)\n", - " total+=1\n", + " total += 1\n", "print(total)" ] }, @@ -2334,9 +2358,9 @@ } ], "source": [ - "len_documents=len(restored_db.documents)\n", + "len_documents = len(restored_db.documents)\n", "keys = list(restored_db.transformed_documents.keys())\n", - "len_transformed_documents=len(restored_db.transformed_documents[keys[0]])\n", + "len_transformed_documents = len(restored_db.transformed_documents[keys[0]])\n", "print(len_documents, len_transformed_documents, keys)" ] }, @@ -2367,7 +2391,7 @@ ], "source": [ "# lets' print out part of the vector\n", - "restored_db.transformed_documents[keys[0]][0].vector[0:10]\n" + "restored_db.transformed_documents[keys[0]][0].vector[0:10]" ] }, { @@ -2397,11 +2421,9 @@ } ], "source": [ - "\n", "from adalflow.components.retriever import FAISSRetriever\n", "\n", "\n", - "\n", "retriever = FAISSRetriever(embedder=components[\"embedder\"], top_k=5)\n", "print(retriever)" ] @@ -2447,6 +2469,7 @@ "source": [ "# convert vectors to numpy array\n", "import numpy as np\n", + "\n", "vectors_np = np.array(vectors, dtype=np.float32)" ] }, @@ -2521,7 +2544,9 @@ "source": [ "# get initial documents\n", "for per_query_output in output:\n", - " per_query_output.documents = [documents[idx] for idx in per_query_output.doc_indices]\n", + " per_query_output.documents = [\n", + " documents[idx] for idx in per_query_output.doc_indices\n", + " ]\n", "\n", "output" ] @@ -2591,7 +2616,9 @@ "outputs": [], "source": [ "retriever = BM25Retriever(top_k=1)\n", - "retriever.build_index_from_documents([\"hello world\", \"world is beautiful\", \"today is a good day\"])\n", + "retriever.build_index_from_documents(\n", + " [\"hello world\", \"world is beautiful\", \"today is a good day\"]\n", + ")\n", "output = retriever.retrieve(\"hello\")\n", "output" ] diff --git a/tutorials/tools.ipynb b/tutorials/tools.ipynb index c32b9420..092ef764 100644 --- a/tutorials/tools.ipynb +++ b/tutorials/tools.ipynb @@ -20,6 +20,7 @@ "\n", "client = OpenAI()\n", "\n", + "\n", "# Example dummy function hard coded to return the same weather\n", "# In production, this could be your backend API or an external API\n", "def get_current_weather(location, 
unit=\"fahrenheit\"):\n", @@ -27,15 +28,23 @@ " if \"tokyo\" in location.lower():\n", " return json.dumps({\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": unit})\n", " elif \"san francisco\" in location.lower():\n", - " return json.dumps({\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": unit})\n", + " return json.dumps(\n", + " {\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": unit}\n", + " )\n", " elif \"paris\" in location.lower():\n", " return json.dumps({\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": unit})\n", " else:\n", " return json.dumps({\"location\": location, \"temperature\": \"unknown\"})\n", "\n", + "\n", "def run_conversation():\n", " # Step 1: send the conversation and available functions to the model\n", - " messages = [{\"role\": \"user\", \"content\": \"What's the weather like in San Francisco, Tokyo, and Paris?\"}]\n", + " messages = [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": \"What's the weather like in San Francisco, Tokyo, and Paris?\",\n", + " }\n", + " ]\n", " tools = [\n", " {\n", " \"type\": \"function\",\n", @@ -95,6 +104,8 @@ " messages=messages,\n", " ) # get a new response from the model where it can see the function response\n", " return second_response\n", + "\n", + "\n", "print(run_conversation())" ] }, @@ -110,16 +121,19 @@ "import time\n", "import asyncio\n", "\n", + "\n", "def multiply(a: int, b: int) -> int:\n", " \"\"\"Multiply two numbers.\"\"\"\n", " time.sleep(1)\n", " return a * b\n", "\n", + "\n", "def add(a: int, b: int) -> int:\n", " \"\"\"Add two numbers.\"\"\"\n", " time.sleep(1)\n", " return a + b\n", "\n", + "\n", "async def divide(a: float, b: float) -> float:\n", " \"\"\"Divide two numbers.\"\"\"\n", " await asyncio.sleep(1)\n", @@ -136,15 +150,20 @@ " \"\"\"Sum the elements of an array.\"\"\"\n", " return np.sum(arr)\n", "\n", + "\n", "x = 2\n", + "\n", + "\n", "@dataclass\n", "class Point:\n", " x: int\n", " y: int\n", "\n", + "\n", "def add_points(p1: Point, p2: Point) -> Point:\n", " return Point(p1.x + p2.x, p1.y + p2.y)\n", "\n", + "\n", "all_functions = [multiply, add, divide, search, numpy_sum, add_points]\n", "\n", "all_functions_dict = {f.__name__: f for f in all_functions}" @@ -173,10 +192,8 @@ "\n", "from adalflow.core.func_tool import FunctionTool\n", "\n", - "functions =[multiply, add, divide, search, numpy_sum, add_points]\n", - "tools = [\n", - " FunctionTool(fn=fn) for fn in functions\n", - "]\n", + "functions = [multiply, add, divide, search, numpy_sum, add_points]\n", + "tools = [FunctionTool(fn=fn) for fn in functions]\n", "for tool in tools:\n", " print(tool)" ] @@ -188,7 +205,7 @@ "outputs": [], "source": [ "# create a context map\n", - "context_map = {tool.definition.func_name: tool for tool in tools}\n" + "context_map = {tool.definition.func_name: tool for tool in tools}" ] }, { @@ -295,7 +312,7 @@ } ], "source": [ - "# execute get_current_weather using function call \n", + "# execute get_current_weather using function call\n", "\n", "ft.call(**{\"location\": \"San Francisco\", \"unit\": \"celsius\"})" ] @@ -344,8 +361,7 @@ "print(tools[2].execute(**{\"a\": 10, \"b\": 2}))\n", "\n", "display(await tools[2].acall(**{\"a\": 10, \"b\": 2}))\n", - "display(await tools[2].execute(**{\"a\": 10, \"b\": 2}))\n", - "\n" + "display(await tools[2].execute(**{\"a\": 10, \"b\": 2}))" ] }, { @@ -442,34 +458,38 @@ } ], "source": [ - "# call all the above functions \n", + "# call all the above functions\n", "import nest_asyncio\n", "\n", 
"nest_asyncio.apply()\n", "\n", "\n", - "\n", "async def async_function_1():\n", " await asyncio.sleep(1)\n", " return \"Function 1 completed\"\n", "\n", + "\n", "def sync_function_1():\n", " time.sleep(1)\n", " return \"Function 1 completed\"\n", "\n", + "\n", "async def async_function_2():\n", " await asyncio.sleep(2)\n", " return \"Function 2 completed\"\n", "\n", + "\n", "def sync_function_2():\n", " time.sleep(2)\n", " return \"Function 2 completed\"\n", "\n", + "\n", "async_tool_1 = FunctionTool(async_function_1)\n", "sync_tool_1 = FunctionTool(sync_function_2)\n", "async_tool_2 = FunctionTool(async_function_2)\n", "sync_tool_2 = FunctionTool(sync_function_2)\n", "\n", + "\n", "def run_sync_and_async_mix_without_wait():\n", " # both sync and async tool can use execute\n", " # sync tool can also use call\n", @@ -484,6 +504,7 @@ " print(f\"run_sync_and_async_mix_without_wait time: {end_time - start_time}\")\n", " return results\n", "\n", + "\n", "async def run_sync_and_async_mix():\n", " # both sync and async tool can use execute&to_thread\n", " # async tool can also use acall without to_thread\n", @@ -492,13 +513,13 @@ " results = await asyncio.gather(\n", " async_tool_1.execute(),\n", " sync_tool_1.execute(),\n", - " \n", " async_tool_2.acall(),\n", " )\n", " end_time = time.time()\n", " print(f\"run_sync_and_async_mix time: {end_time - start_time}\")\n", " return results\n", "\n", + "\n", "# Execute functions\n", "results_without_wait = run_sync_and_async_mix_without_wait()\n", "display(results_without_wait)\n", @@ -675,7 +696,7 @@ "small_tool_manager = ToolManager(tools=tools[:2])\n", "\n", "renered_prompt = prompt(tools=tool_manager.yaml_definitions)\n", - "print(renered_prompt)\n" + "print(renered_prompt)" ] }, { @@ -703,16 +724,16 @@ } ], "source": [ - "# let's render the output format using Function class \n", + "# let's render the output format using Function class\n", "\n", "from adalflow.core.types import Function\n", "\n", "\n", - "output_data_class = Function \n", + "output_data_class = Function\n", "output_format_str = output_data_class.to_json_signature(exclude=[\"thought\"])\n", "\n", - "renered_prompt= prompt(output_format_str=output_format_str)\n", - "print(renered_prompt)\n" + "renered_prompt = prompt(output_format_str=output_format_str)\n", + "print(renered_prompt)" ] }, { @@ -776,7 +797,7 @@ "\n", "func_parser = JsonOutputParser(data_class=Function)\n", "instructions = func_parser.format_instructions(exclude=[\"thought\"])\n", - "print(instructions)\n" + "print(instructions)" ] }, { @@ -844,9 +865,7 @@ "model_kwargs = {\"model\": \"gpt-3.5-turbo\"}\n", "prompt_kwargs = {\n", " \"tools\": tool_manager.yaml_definitions,\n", - " \"output_format_str\": func_parser.format_instructions(\n", - " exclude=[\"thought\", \"args\"]\n", - " ),\n", + " \"output_format_str\": func_parser.format_instructions(exclude=[\"thought\", \"args\"]),\n", "}\n", "generator = Generator(\n", " model_client=ModelClientType.OPENAI(),\n", @@ -887,14 +906,14 @@ "source": [ "# call queries\n", "queries = [\n", - " \"add 2 and 3\",\n", - " \"search for something\",\n", - " \"add points (1, 2) and (3, 4)\",\n", - " \"sum numpy array with arr = np.array([[1, 2], [3, 4]])\",\n", - " \"multiply 2 with local variable x\",\n", - " \"divide 2 by 3\",\n", - " \"Add 5 to variable y\",\n", - " ]" + " \"add 2 and 3\",\n", + " \"search for something\",\n", + " \"add points (1, 2) and (3, 4)\",\n", + " \"sum numpy array with arr = np.array([[1, 2], [3, 4]])\",\n", + " \"multiply 2 with local variable x\",\n", + 
" \"divide 2 by 3\",\n", + " \"Add 5 to variable y\",\n", + "]" ] }, { @@ -1046,7 +1065,6 @@ } ], "source": [ - "\n", "for idx, query in enumerate(queries):\n", " prompt_kwargs = {\"input_str\": query}\n", " print(f\"\\n{idx} Query: {query}\")\n", @@ -1056,10 +1074,12 @@ " # print(f\"LLM raw output: {result.raw_response}\")\n", " func = Function.from_dict(result.data)\n", " print(f\"Function: {func}\")\n", - " func_output= tool_manager.execute_func(func)\n", + " func_output = tool_manager.execute_func(func)\n", " display(f\"Function output: {func_output}\")\n", " except Exception as e:\n", - " print(f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\")" + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )" ] }, { @@ -1100,7 +1120,7 @@ } ], "source": [ - "# let's use FunctionExpression to call the function instead \n", + "# let's use FunctionExpression to call the function instead\n", "\n", "from adalflow.core.types import FunctionExpression\n", "\n", @@ -1109,7 +1129,9 @@ "print(output_format_str)\n", "\n", "# lets' add one example to be more robust that they should call it with function call expression\n", - "example = FunctionExpression.from_function(thought=None, func=add_points, **{\"p1\": Point(1, 2), \"p2\": Point(3, 4)})\n", + "example = FunctionExpression.from_function(\n", + " thought=None, func=add_points, **{\"p1\": Point(1, 2), \"p2\": Point(3, 4)}\n", + ")\n", "print(example)" ] }, @@ -1258,15 +1280,15 @@ "instructions = parser.format_instructions(exclude=[\"thought\"])\n", "\n", "prompt_kwargs = {\n", - " \"tools\": [tool.definition.to_yaml() for tool in tools],\n", - " \"output_format_str\": parser.format_instructions(exclude=[\"thought\"]),\n", - " }\n", + " \"tools\": [tool.definition.to_yaml() for tool in tools],\n", + " \"output_format_str\": parser.format_instructions(exclude=[\"thought\"]),\n", + "}\n", "generator = Generator(\n", " model_client=ModelClientType.OPENAI(),\n", " model_kwargs=model_kwargs,\n", " template=template,\n", " prompt_kwargs=prompt_kwargs,\n", - " output_processors=parser\n", + " output_processors=parser,\n", ")\n", "\n", "generator.print_prompt(**prompt_kwargs)" @@ -1291,67 +1313,65 @@ "\n", "# Define a list of safe built-ins\n", "SAFE_BUILTINS = {\n", - " 'abs': abs,\n", - " 'all': all,\n", - " 'any': any,\n", - " 'bin': bin,\n", - " 'bool': bool,\n", - " 'bytearray': bytearray,\n", - " 'bytes': bytes,\n", - " 'callable': callable,\n", - " 'chr': chr,\n", - " 'complex': complex,\n", - " 'dict': dict,\n", - " 'divmod': divmod,\n", - " 'enumerate': enumerate,\n", - " 'filter': filter,\n", - " 'float': float,\n", - " 'format': format,\n", - " 'frozenset': frozenset,\n", - " 'getattr': getattr,\n", - " 'hasattr': hasattr,\n", - " 'hash': hash,\n", - " 'hex': hex,\n", - " 'int': int,\n", - " 'isinstance': isinstance,\n", - " 'issubclass': issubclass,\n", - " 'iter': iter,\n", - " 'len': len,\n", - " 'list': list,\n", - " 'map': map,\n", - " 'max': max,\n", - " 'min': min,\n", - " 'next': next,\n", - " 'object': object,\n", - " 'oct': oct,\n", - " 'ord': ord,\n", - " 'pow': pow,\n", - " 'range': range,\n", - " 'repr': repr,\n", - " 'reversed': reversed,\n", - " 'round': round,\n", - " 'set': set,\n", - " 'slice': slice,\n", - " 'sorted': sorted,\n", - " 'str': str,\n", - " 'sum': sum,\n", - " 'tuple': tuple,\n", - " 'type': type,\n", - " 'zip': zip,\n", + " \"abs\": abs,\n", + " \"all\": all,\n", + " \"any\": any,\n", + " \"bin\": bin,\n", + " \"bool\": 
bool,\n", + " \"bytearray\": bytearray,\n", + " \"bytes\": bytes,\n", + " \"callable\": callable,\n", + " \"chr\": chr,\n", + " \"complex\": complex,\n", + " \"dict\": dict,\n", + " \"divmod\": divmod,\n", + " \"enumerate\": enumerate,\n", + " \"filter\": filter,\n", + " \"float\": float,\n", + " \"format\": format,\n", + " \"frozenset\": frozenset,\n", + " \"getattr\": getattr,\n", + " \"hasattr\": hasattr,\n", + " \"hash\": hash,\n", + " \"hex\": hex,\n", + " \"int\": int,\n", + " \"isinstance\": isinstance,\n", + " \"issubclass\": issubclass,\n", + " \"iter\": iter,\n", + " \"len\": len,\n", + " \"list\": list,\n", + " \"map\": map,\n", + " \"max\": max,\n", + " \"min\": min,\n", + " \"next\": next,\n", + " \"object\": object,\n", + " \"oct\": oct,\n", + " \"ord\": ord,\n", + " \"pow\": pow,\n", + " \"range\": range,\n", + " \"repr\": repr,\n", + " \"reversed\": reversed,\n", + " \"round\": round,\n", + " \"set\": set,\n", + " \"slice\": slice,\n", + " \"sorted\": sorted,\n", + " \"str\": str,\n", + " \"sum\": sum,\n", + " \"tuple\": tuple,\n", + " \"type\": type,\n", + " \"zip\": zip,\n", "}\n", "\n", + "\n", "# Define a context manager to limit execution time\n", "# Create a sandbox execution function\n", "def sandbox_exec(code, context=SAFE_BUILTINS, timeout=5):\n", "\n", " try:\n", - " compiled_code = compile(code, '', 'exec')\n", + " compiled_code = compile(code, \"\", \"exec\")\n", "\n", " # Result dictionary to store execution results\n", - " result = {\n", - " \"output\" : None,\n", - " \"error\" : None\n", - " }\n", + " result = {\"output\": None, \"error\": None}\n", "\n", " # Define a target function for the thread\n", " def target():\n", @@ -1360,7 +1380,6 @@ " exec(compiled_code, context, result)\n", " except Exception as e:\n", " result[\"error\"] = e\n", - " \n", "\n", " # Create a thread to execute the code\n", " thread = threading.Thread(target=target)\n", @@ -1377,6 +1396,7 @@ "\n", " return result\n", "\n", + "\n", "# Example usage\n", "code = \"\"\"\n", "def add(a, b+5):\n", @@ -1391,7 +1411,7 @@ "except TimeoutError as e:\n", " print(e)\n", "except Exception as e:\n", - " print(\"Sandbox error:\", e)\n" + " print(\"Sandbox error:\", e)" ] }, { @@ -1510,23 +1530,23 @@ } ], "source": [ - "# run the generator but we will use FunctionTool.parse_function_call_expr and have a context map \n", + "# run the generator but we will use FunctionTool.parse_function_call_expr and have a context map\n", "\n", "all_functions_dict.update(\n", " {\n", - " \"Point\": Point,\n", - " # support numpy\n", - " \"np\": np,\n", - " \"np.ndarray\": np.ndarray,\n", - " \"array\": np.array,\n", - " \"arr\": arr,\n", - " \"np.array\": np.array,\n", - " \"x\": x\n", + " \"Point\": Point,\n", + " # support numpy\n", + " \"np\": np,\n", + " \"np.ndarray\": np.ndarray,\n", + " \"array\": np.array,\n", + " \"arr\": arr,\n", + " \"np.array\": np.array,\n", + " \"x\": x,\n", " }\n", ")\n", - "y=4\n", + "y = 4\n", "print(all_functions_dict)\n", - "for query in queries+[\"Add 5 to variable y\"]:\n", + "for query in queries + [\"Add 5 to variable y\"]:\n", "\n", " try:\n", " print(f\"Query: {query}\")\n", @@ -1537,10 +1557,14 @@ " func_expr = FunctionExpression.from_dict(result.data)\n", "\n", " print(func_expr)\n", - " assert isinstance(func_expr, FunctionExpression), f\"Expected FunctionExpression, got {type(result.data)}\"\n", + " assert isinstance(\n", + " func_expr, FunctionExpression\n", + " ), f\"Expected FunctionExpression, got {type(result.data)}\"\n", "\n", " # more secure way to handle function 
call\n", - " func: Function = FunctionTool.parse_function_call_expr(expr=func_expr.action, context_map=all_functions_dict)\n", + " func: Function = FunctionTool.parse_function_call_expr(\n", + " expr=func_expr.action, context_map=all_functions_dict\n", + " )\n", " print(func)\n", " fun_output = all_functions_dict[func.name](*func.args, **func.kwargs)\n", " print(\"func output:\", fun_output)\n", @@ -1558,18 +1582,24 @@ " print(\"sandbox output:\", result)\n", " except Exception as e:\n", " print(e)\n", - " print(f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\")\n", + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )\n", " try:\n", " fun_output = eval(func_expr.action)\n", " print(\"func output:\", fun_output)\n", "\n", - " #sandbox_exec\n", + " # sandbox_exec\n", " action = \"output=\" + func_expr.action\n", - " result = sandbox_exec(action, context={**SAFE_BUILTINS, **all_functions_dict})\n", + " result = sandbox_exec(\n", + " action, context={**SAFE_BUILTINS, **all_functions_dict}\n", + " )\n", " print(\"sandbox output:\", result)\n", " except Exception as e:\n", " print(e)\n", - " print(f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\")" + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )" ] }, { @@ -1776,20 +1806,27 @@ } ], "source": [ - "queries = [\"add 2 and 3\", \"search for something\", \"add points (1, 2) and (3, 4)\", \"sum numpy array with arr = np.array([[1, 2], [3, 4]])\", \"multiply 2 with local variable x\", \"divide 2 by 3\"]\n", + "queries = [\n", + " \"add 2 and 3\",\n", + " \"search for something\",\n", + " \"add points (1, 2) and (3, 4)\",\n", + " \"sum numpy array with arr = np.array([[1, 2], [3, 4]])\",\n", + " \"multiply 2 with local variable x\",\n", + " \"divide 2 by 3\",\n", + "]\n", "\n", - "from adalflow.core.string_parser import JsonParser # improve a list of json\n", + "from adalflow.core.string_parser import JsonParser # improve a list of json\n", "\n", "preset_prompt_kwargs = {\n", - " \"tools\": [tool.definition.to_yaml() for tool in tools],\n", - " \"output_format_str\": parser.format_instructions(exclude=[\"thought\"])\n", - " }\n", + " \"tools\": [tool.definition.to_yaml() for tool in tools],\n", + " \"output_format_str\": parser.format_instructions(exclude=[\"thought\"]),\n", + "}\n", "multi_call_gen = Generator(\n", " model_client=ModelClientType.OPENAI(),\n", " model_kwargs=model_kwargs,\n", " template=multple_function_call_template,\n", " prompt_kwargs=preset_prompt_kwargs,\n", - " output_processors=JsonParser()\n", + " output_processors=JsonParser(),\n", ")\n", "print(multi_call_gen)\n", "multi_call_gen.print_prompt()" @@ -1882,8 +1919,12 @@ } ], "source": [ - "def execute_function_by_parsing(func_expr: FunctionExpression, all_functions_dict: Dict[str, Any]) -> Any:\n", - " func: Function = FunctionTool.parse_function_call_expr(expr=func_expr.action, context_map=all_functions_dict)\n", + "def execute_function_by_parsing(\n", + " func_expr: FunctionExpression, all_functions_dict: Dict[str, Any]\n", + ") -> Any:\n", + " func: Function = FunctionTool.parse_function_call_expr(\n", + " expr=func_expr.action, context_map=all_functions_dict\n", + " )\n", " print(func)\n", " fun_output = all_functions_dict[func.name](*func.args, **func.kwargs)\n", " print(\"func output:\", fun_output)\n", @@ -1900,7 +1941,10 @@ " print(\"func output:\", fun_output)\n", 
" return fun_output\n", "\n", - "def execute_function_by_sandbox(func_expr: FunctionExpression, all_functions_dict: Dict[str, Any]) -> Any:\n", + "\n", + "def execute_function_by_sandbox(\n", + " func_expr: FunctionExpression, all_functions_dict: Dict[str, Any]\n", + ") -> Any:\n", " # sandbox_exec\n", " action = \"output=\" + func_expr.action\n", " result = sandbox_exec(action, context={**SAFE_BUILTINS, **all_functions_dict})\n", @@ -1909,10 +1953,8 @@ " return result\n", "\n", "\n", - "\n", - "\n", "for i in range(0, len(queries), 2):\n", - " query = \" and \".join(queries[i:i+2])\n", + " query = \" and \".join(queries[i : i + 2])\n", " print(f\"Query: {query}\\n_________________________\\n\")\n", " prompt_kwargs = {\"input_str\": query}\n", " result = multi_call_gen(prompt_kwargs=prompt_kwargs)\n", @@ -1925,32 +1967,46 @@ " print(func_exprs)\n", " except Exception as e:\n", " print(e)\n", - " print(f\"Failed to parse the function for query: {query}, func: {result.data}, error: {e}\")\n", + " print(\n", + " f\"Failed to parse the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )\n", " continue\n", " try:\n", - " func_outputs_1 = [execute_function_by_parsing(func_expr, all_functions_dict) for func_expr in func_exprs]\n", + " func_outputs_1 = [\n", + " execute_function_by_parsing(func_expr, all_functions_dict)\n", + " for func_expr in func_exprs\n", + " ]\n", " print(f\"fun_output by parsing: {func_outputs_1}\\n_________________________\\n\")\n", " except Exception as e:\n", " print(e)\n", - " print(f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\")\n", + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )\n", "\n", " try:\n", "\n", - " func_outputs_2 = [execute_function_by_eval(func_expr) for func_expr in func_exprs]\n", + " func_outputs_2 = [\n", + " execute_function_by_eval(func_expr) for func_expr in func_exprs\n", + " ]\n", " print(f\"fun_output by eval: {func_outputs_2}\\n_________________________\\n\")\n", " except Exception as e:\n", " print(e)\n", - " print(f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\")\n", + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )\n", "\n", " try:\n", "\n", - " func_outputs_3 = [execute_function_by_sandbox(func_expr, all_functions_dict) for func_expr in func_exprs]\n", + " func_outputs_3 = [\n", + " execute_function_by_sandbox(func_expr, all_functions_dict)\n", + " for func_expr in func_exprs\n", + " ]\n", " print(f\"fun_output by sandbox: {func_outputs_3}\\n_________________________\\n\")\n", " except Exception as e:\n", " print(e)\n", - " print(f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\")\n", - "\n", - " \n" + " print(\n", + " f\"Failed to execute the function for query: {query}, func: {result.data}, error: {e}\"\n", + " )" ] }, { @@ -1975,6 +2031,7 @@ "\n", "client = OpenAI()\n", "\n", + "\n", "# Example dummy function hard coded to return the same weather\n", "# In production, this could be your backend API or an external API\n", "def get_current_weather(location, unit=\"fahrenheit\"):\n", @@ -1982,15 +2039,23 @@ " if \"tokyo\" in location.lower():\n", " return json.dumps({\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": unit})\n", " elif \"san francisco\" in location.lower():\n", - " return json.dumps({\"location\": \"San Francisco\", \"temperature\": \"72\", 
\"unit\": unit})\n", + " return json.dumps(\n", + " {\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": unit}\n", + " )\n", " elif \"paris\" in location.lower():\n", " return json.dumps({\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": unit})\n", " else:\n", " return json.dumps({\"location\": location, \"temperature\": \"unknown\"})\n", "\n", + "\n", "def run_conversation():\n", " # Step 1: send the conversation and available functions to the model\n", - " messages = [{\"role\": \"user\", \"content\": \"What's the weather like in San Francisco, Tokyo, and Paris in celsius?\"}]\n", + " messages = [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": \"What's the weather like in San Francisco, Tokyo, and Paris in celsius?\",\n", + " }\n", + " ]\n", " tools = [\n", " {\n", " \"type\": \"function\",\n", @@ -2034,11 +2099,13 @@ " for tool_call in tool_calls:\n", " function_name = tool_call.function.name\n", " function_to_call = available_functions[function_name]\n", - " function_args = json.loads(tool_call.function.arguments)# use json.loads to convert a string to a dictionary\n", + " function_args = json.loads(\n", + " tool_call.function.arguments\n", + " ) # use json.loads to convert a string to a dictionary\n", " # function_response = function_to_call(\n", " # location=function_args.get(\"location\"),\n", " # unit=function_args.get(\"unit\"),\n", - " # ) \n", + " # )\n", " # you have to exactly know the arguments, this does not make sense. How would i know its arguments. **function_args (makes more sense)\n", " function_response = function_to_call(**function_args)\n", " messages.append(\n", @@ -2054,6 +2121,8 @@ " messages=messages,\n", " ) # get a new response from the model where it can see the function response\n", " return second_response\n", + "\n", + "\n", "print(run_conversation())" ] }, @@ -2109,18 +2178,17 @@ "outputs": [], "source": [ "def get_current_weather(location: str, unit: str = \"fahrenheit\"):\n", - " \"\"\"Get the current weather in a given location\"\"\"\n", - " if \"tokyo\" in location.lower():\n", - " return json.dumps({\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": unit})\n", - " elif \"san francisco\" in location.lower():\n", - " return json.dumps(\n", - " {\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": unit}\n", - " )\n", - " elif \"paris\" in location.lower():\n", - " return json.dumps({\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": unit})\n", - " else:\n", - " return json.dumps({\"location\": location, \"temperature\": \"unknown\"})\n", - "\n" + " \"\"\"Get the current weather in a given location\"\"\"\n", + " if \"tokyo\" in location.lower():\n", + " return json.dumps({\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": unit})\n", + " elif \"san francisco\" in location.lower():\n", + " return json.dumps(\n", + " {\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": unit}\n", + " )\n", + " elif \"paris\" in location.lower():\n", + " return json.dumps({\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": unit})\n", + " else:\n", + " return json.dumps({\"location\": location, \"temperature\": \"unknown\"})" ] }, { @@ -2134,21 +2202,29 @@ "from adalflow.core.base_data_class import DataClass\n", "from dataclasses import dataclass, field\n", "\n", + "\n", "@dataclass\n", "class Weather(DataClass):\n", - " location: str = field(metadata={\"description\": \"The city and state, e.g. 
San Francisco, CA\"})\n", + " location: str = field(\n", + " metadata={\"description\": \"The city and state, e.g. San Francisco, CA\"}\n", + " )\n", " unit: str = field(metadata={\"enum\": [\"celsius\", \"fahrenheit\"]})\n", "\n", + "\n", "def get_current_weather_2(weather: Weather):\n", " \"\"\"Get the current weather in a given location\"\"\"\n", " if \"tokyo\" in weather.location.lower():\n", - " return json.dumps({\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": weather.unit})\n", + " return json.dumps(\n", + " {\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": weather.unit}\n", + " )\n", " elif \"san francisco\" in weather.location.lower():\n", " return json.dumps(\n", " {\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": weather.unit}\n", " )\n", " elif \"paris\" in weather.location.lower():\n", - " return json.dumps({\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": weather.unit})\n", + " return json.dumps(\n", + " {\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": weather.unit}\n", + " )\n", " else:\n", " return json.dumps({\"location\": weather.location, \"temperature\": \"unknown\"})" ] @@ -2211,8 +2287,7 @@ "\n", "tool_2 = FunctionTool.from_defaults(fn=get_current_weather_2)\n", "\n", - "print(tool_2.metadata.to_json())\n", - "\n" + "print(tool_2.metadata.to_json())" ] }, { @@ -2229,38 +2304,23 @@ "metadata": {}, "outputs": [], "source": [ - "adalflow_fn_schema ={\n", - " \"type\": \"object\",\n", - " \"properties\": {\n", - " \"weather\": {\n", - " \"type\": \"Weather\",\n", - " \"desc\": \"The city and state, e.g. San Francisco, CA\",\n", - " \"enum\": [\n", - " \"celsius\",\n", - " \"fahrenheit\"\n", - " ]\n", - " }\n", - " },\n", - " \"required\": [\n", - " \"weather\"\n", - " ],\n", - " \"definitions\": {\n", - " \"weather\": {\n", - " \"type\": \"object\",\n", - " \"properties\": {\n", - " \"location\": {\n", - " \"type\": \"str\"\n", - " },\n", - " \"unit\": {\n", - " \"type\": \"str\"\n", - " }\n", - " },\n", - " \"required\": [\n", - " \"location\",\n", - " \"unit\"\n", - " ]\n", - " }\n", + "adalflow_fn_schema = {\n", + " \"type\": \"object\",\n", + " \"properties\": {\n", + " \"weather\": {\n", + " \"type\": \"Weather\",\n", + " \"desc\": \"The city and state, e.g. 
San Francisco, CA\",\n", + " \"enum\": [\"celsius\", \"fahrenheit\"],\n", + " }\n", + " },\n", + " \"required\": [\"weather\"],\n", + " \"definitions\": {\n", + " \"weather\": {\n", + " \"type\": \"object\",\n", + " \"properties\": {\"location\": {\"type\": \"str\"}, \"unit\": {\"type\": \"str\"}},\n", + " \"required\": [\"location\", \"unit\"],\n", " }\n", + " },\n", "}" ] }, @@ -2333,7 +2393,7 @@ } ], "source": [ - "# prepare function tool \n", + "# prepare function tool\n", "weather_tool = FunctionTool.from_defaults(fn=_get_current_weather)\n", "print(weather_tool)" ] @@ -2395,7 +2455,7 @@ } ], "source": [ - "# prepare a minimal function calling template \n", + "# prepare a minimal function calling template\n", "template = r\"\"\"You have these tools available:\n", " \n", " {% for tool in tools %}\n", @@ -2434,11 +2494,13 @@ "\n", "model_kwargs = {\"model\": \"gpt-3.5-turbo\", \"temperature\": 0.3, \"stream\": False}\n", "\n", + "\n", "@dataclass\n", "class Function(DataClass):\n", " name: str = field(metadata={\"desc\": \"The name of the function\"})\n", " args: Dict[str, Any] = field(metadata={\"desc\": \"The arguments of the function\"})\n", "\n", + "\n", "generator = Generator(\n", " model_client=ModelClientType.OPENAI(),\n", " model_kwargs=model_kwargs,\n", @@ -2535,9 +2597,7 @@ "source": [ "# call the function\n", "\n", - "function_map = {\n", - " \"_get_current_weather\": weather_tool\n", - "}\n", + "function_map = {\"_get_current_weather\": weather_tool}\n", "\n", "function_name = structured_output.name\n", "function_args = structured_output.args\n", @@ -2695,22 +2755,30 @@ "from dataclasses import dataclass, field\n", "from typing import Any, Dict\n", "\n", + "\n", "@dataclass\n", "class Address:\n", " street: str\n", " city: str\n", " zipcode: str\n", "\n", + "\n", "@dataclass\n", "class Person:\n", " name: str\n", " age: int\n", " address: Address\n", "\n", + "\n", "# Example instance of the nested dataclasses\n", - "person = Person(name=\"John Doe\", age=30, address=Address(street=\"123 Main St\", city=\"Anytown\", zipcode=\"12345\"))\n", + "person = Person(\n", + " name=\"John Doe\",\n", + " age=30,\n", + " address=Address(street=\"123 Main St\", city=\"Anytown\", zipcode=\"12345\"),\n", + ")\n", "print(person)\n", "\n", + "\n", "def to_dict(obj: Any) -> Dict[str, Any]:\n", " if hasattr(obj, \"__dataclass_fields__\"):\n", " return {key: to_dict(value) for key, value in obj.__dict__.items()}\n", @@ -2721,6 +2789,7 @@ " else:\n", " return obj\n", "\n", + "\n", "# Convert the person instance to a dictionary\n", "person_dict = to_dict(person)\n", "print(person_dict)" @@ -2741,20 +2810,31 @@ ], "source": [ "from typing import List\n", + "\n", + "\n", "@dataclass\n", "class Address:\n", " street: str\n", " city: str\n", " zipcode: str\n", "\n", + "\n", "@dataclass\n", "class Person:\n", " name: str\n", " age: int\n", " addresses: List[Address]\n", "\n", + "\n", "# Example instance of the nested dataclasses\n", - "person = Person(name=\"John Doe\", age=30, addresses=[Address(street=\"123 Main St\", city=\"Anytown\", zipcode=\"12345\"), Address(street=\"456 Elm St\", city=\"Othertown\", zipcode=\"67890\")])\n", + "person = Person(\n", + " name=\"John Doe\",\n", + " age=30,\n", + " addresses=[\n", + " Address(street=\"123 Main St\", city=\"Anytown\", zipcode=\"12345\"),\n", + " Address(street=\"456 Elm St\", city=\"Othertown\", zipcode=\"67890\"),\n", + " ],\n", + ")\n", "print(person)" ] }, @@ -2795,6 +2875,8 @@ ], "source": [ "from typing import List, Dict, Optional\n", + "\n", + 
"\n", "def dataclass_obj_to_dict(\n", " obj: Any, exclude: Optional[Dict[str, List[str]]] = None, parent_key: str = \"\"\n", ") -> Dict[str, Any]:\n", @@ -2851,24 +2933,30 @@ " else:\n", " return obj\n", "\n", + "\n", "from dataclasses import dataclass\n", "from typing import List\n", "\n", + "\n", "@dataclass\n", "class TrecData:\n", " question: str\n", " label: int\n", "\n", + "\n", "@dataclass\n", "class TrecDataList:\n", "\n", " data: List[TrecData]\n", " name: str\n", "\n", + "\n", "trec_data = TrecData(question=\"What is the capital of France?\", label=0)\n", "trec_data_list = TrecDataList(data=[trec_data], name=\"trec_data_list\")\n", "\n", - "dataclass_obj_to_dict(trec_data_list, exclude={\"TrecData\": [\"label\"], \"TrecDataList\": [\"name\"]})" + "dataclass_obj_to_dict(\n", + " trec_data_list, exclude={\"TrecData\": [\"label\"], \"TrecDataList\": [\"name\"]}\n", + ")" ] }, { @@ -2878,14 +2966,24 @@ "outputs": [], "source": [ "from typing import Type\n", + "\n", + "\n", "def dataclass_obj_from_dict(cls: Type[Any], data: Dict[str, Any]) -> Any:\n", " if hasattr(cls, \"__dataclass_fields__\"):\n", " fieldtypes = {f.name: f.type for f in cls.__dataclass_fields__.values()}\n", - " return cls(**{key: dataclass_obj_from_dict(fieldtypes[key], value) for key, value in data.items()})\n", + " return cls(\n", + " **{\n", + " key: dataclass_obj_from_dict(fieldtypes[key], value)\n", + " for key, value in data.items()\n", + " }\n", + " )\n", " elif isinstance(data, list):\n", " return [dataclass_obj_from_dict(cls.__args__[0], item) for item in data]\n", " elif isinstance(data, dict):\n", - " return {key: dataclass_obj_from_dict(cls.__args__[1], value) for key, value in data.items()}\n", + " return {\n", + " key: dataclass_obj_from_dict(cls.__args__[1], value)\n", + " for key, value in data.items()\n", + " }\n", " else:\n", " return data" ] @@ -2933,7 +3031,12 @@ } ], "source": [ - "dataclass_obj_from_dict(TrecDataList, dataclass_obj_to_dict(trec_data_list, exclude={\"TrecData\": [\"label\"], \"TrecDataList\": [\"name\"]}))" + "dataclass_obj_from_dict(\n", + " TrecDataList,\n", + " dataclass_obj_to_dict(\n", + " trec_data_list, exclude={\"TrecData\": [\"label\"], \"TrecDataList\": [\"name\"]}\n", + " ),\n", + ")" ] } ], diff --git a/use_cases/agent/react_agent.ipynb b/use_cases/agent/react_agent.ipynb index cdc199fe..a93cb89e 100644 --- a/use_cases/agent/react_agent.ipynb +++ b/use_cases/agent/react_agent.ipynb @@ -43,6 +43,7 @@ "source": [ "# load the dataset\n", "from datasets import load_dataset\n", + "\n", "dataset = load_dataset(path=\"hotpot_qa\", name=\"fullwiki\")" ] }, @@ -114,7 +115,6 @@ } ], "source": [ - "\n", "import dotenv\n", "from adalflow.components.model_client import OpenAIClient\n", "from adalflow.components.agent.react_agent import ReActAgent\n", @@ -150,15 +150,17 @@ "import re\n", "import string\n", "\n", + "\n", "# copy code from the paper\n", "def clean_str(p):\n", - " return p.encode().decode(\"unicode-escape\").encode(\"latin1\").decode(\"utf-8\")\n", + " return p.encode().decode(\"unicode-escape\").encode(\"latin1\").decode(\"utf-8\")\n", + "\n", "\n", "# normalization copied from the paper's code\n", "def normalize_answer(s):\n", " def remove_articles(text):\n", " return re.sub(r\"\\b(a|an|the)\\b\", \" \", text)\n", - " \n", + "\n", " def white_space_fix(text):\n", " return \" \".join(text.split())\n", "\n", @@ -171,6 +173,7 @@ "\n", " return white_space_fix(remove_articles(remove_punc(lower(s))))\n", "\n", + "\n", "def search(entity: str) -> str:\n", " \"\"\"\n", " 
searches the exact entity on Wikipedia and returns the first paragraph if it exists. If not, it will return some similar entities to search.\n", @@ -178,29 +181,33 @@ " # Format the entity for URL encoding\n", " entity_formatted = entity.replace(\" \", \"+\")\n", " url = f\"https://en.wikipedia.org/w/index.php?search={entity_formatted}\"\n", - " \n", + "\n", " # Fetch the page\n", " response = requests.get(url)\n", - " soup = BeautifulSoup(response.text, 'html.parser')\n", - " \n", + " soup = BeautifulSoup(response.text, \"html.parser\")\n", + "\n", " # Check if the exact page was found or suggest similar items\n", " # when
<div class=\"mw-search-result-heading\">
is detected, it means the entity page is not found on wikipedia\n", " result_divs = soup.find_all(\"div\", {\"class\": \"mw-search-result-heading\"})\n", - " \n", - " if result_divs: # this means the searched entity page is not in wikipedia, wikipedia will show a list of similar entities\n", + "\n", + " if (\n", + " result_divs\n", + " ): # this means the searched entity page is not in wikipedia, wikipedia will show a list of similar entities\n", " # get Similar results\n", " similar_titles = [div.a.get_text() for div in result_divs]\n", - " return f\"Could not find exact page for '{entity}'. Similar topics: {similar_titles[:5]}\" # return the top 5 similar titles\n", + " return f\"Could not find exact page for '{entity}'. Similar topics: {similar_titles[:5]}\" # return the top 5 similar titles\n", " else:\n", " # the paper uses page to represent content in
<p>
\n", " # Extract xontent\n", - " page_list = [p.get_text().strip() for p in soup.find_all(\"p\") + soup.find_all(\"ul\")]\n", + " page_list = [\n", + " p.get_text().strip() for p in soup.find_all(\"p\") + soup.find_all(\"ul\")\n", + " ]\n", " # TODO: Recursive search, if find any concept that needs more search then call search again\n", " # if any(\"may refer to:\" in p for p in page_list):\n", " # search(entity)\n", "\n", " # restructure & clean the page content following the paper's logic\n", - " page = ''\n", + " page = \"\"\n", " for p in page_list:\n", " if len(p.split(\" \")) > 2:\n", " page += clean_str(p)\n", @@ -208,31 +215,39 @@ " page += \"\\n\"\n", " paragraphs = page.split(\"\\n\")\n", " paragraphs = [p.strip() for p in paragraphs if p.strip()]\n", - " \n", + "\n", " sentences = []\n", " for p in paragraphs:\n", - " sentences += p.split('. ')\n", - " sentences = [s.strip() + '.' for s in sentences if s.strip()]\n", - " \n", + " sentences += p.split(\". \")\n", + " sentences = [s.strip() + \".\" for s in sentences if s.strip()]\n", + "\n", " # return the first 5 sentences\n", " if sentences:\n", - " return ' '.join(sentences[:5]) if len(sentences)>=5 else ' '.join(sentences)\n", + " return (\n", + " \" \".join(sentences[:5]) if len(sentences) >= 5 else \" \".join(sentences)\n", + " )\n", " else:\n", " return \"No content found on this page.\"\n", - " \n", + "\n", " # TODO: clean the paragraphs and return the searched content\n", "\n", "\n", "def lookup(text: str, keyword: str) -> str:\n", " \"\"\"\n", - " returns the sentences containing keyword in the current passage.\n", + " returns the sentences containing keyword in the current passage.\n", " \"\"\"\n", - " sentences = text.split('.')\n", - " matching_sentences = [sentence.strip() + '.' for sentence in sentences if keyword.lower() in sentence.lower()]\n", + " sentences = text.split(\".\")\n", + " matching_sentences = [\n", + " sentence.strip() + \".\"\n", + " for sentence in sentences\n", + " if keyword.lower() in sentence.lower()\n", + " ]\n", " if not matching_sentences:\n", " return \"No sentences found with the keyword.\"\n", " else:\n", - " return ' '.join(matching_sentences) # Join all matching sentences into a single string" + " return \" \".join(\n", + " matching_sentences\n", + " ) # Join all matching sentences into a single string" ] }, { @@ -262,7 +277,7 @@ "outputs": [], "source": [ "examples = [\n", - "\"\"\"Question: What is the elevation range for the area that the eastern sector of the Colorado orogeny extends into?\n", + " \"\"\"Question: What is the elevation range for the area that the eastern sector of the Colorado orogeny extends into?\n", "Thought 1: I need to search Colorado orogeny, find the area that the eastern sector of the Colorado orogeny extends into, then find the elevation range of the area.\n", "Action 1: search(\"Colorado orogeny\")\n", "Observation 1: The Colorado orogeny was an episode of mountain building (an orogeny) in Colorado and surrounding areas.\n", @@ -277,7 +292,7 @@ "Observation 4: The High Plains are a subregion of the Great Plains. 
From east to west, the High Plains rise in elevation from around 1,800 to 7,000 ft (550 to 2,130 m).[3]\n", "Thought 5: High Plains rise in elevation from around 1,800 to 7,000 ft, so the answer is 1,800 to 7,000 ft.\n", "Action 5: finish(\"1,800 to 7,000 ft\")\"\"\",\n", - "\"\"\"Question: Musician and satirist Allie Goertz wrote a song about the \"The Simpsons\" character Milhouse, who Matt Groening named after who?\n", + " \"\"\"Question: Musician and satirist Allie Goertz wrote a song about the \"The Simpsons\" character Milhouse, who Matt Groening named after who?\n", "Thought 1: The question simplifies to \"The Simpsons\" character Milhouse is named after who. I only need to search Milhouse and find who it is named after.\n", "Action 1: search(\"Milhouse\")\n", "Observation 1: Milhouse Mussolini Van Houten is a recurring character in the Fox animated television series The Simpsons voiced by Pamela Hayden and created by Matt Groening.\n", @@ -286,7 +301,7 @@ "Observation 2: (Result 1 / 1) Milhouse was named after U.S. president Richard Nixon, whose middle name was Milhous. \n", "Thought 3: Milhouse was named after U.S. president Richard Nixon, so the answer is Richard Nixon.\n", "Action 3: finish(\"Richard Nixon\")\"\"\",\n", - "\"\"\"Question: Which documentary is about Finnish rock groups, Adam Clayton Powell or The Saimaa Gesture?\n", + " \"\"\"Question: Which documentary is about Finnish rock groups, Adam Clayton Powell or The Saimaa Gesture?\n", "Thought 1: I need to search Adam Clayton Powell and The Saimaa Gesture, and find which documentary is about Finnish rock groups.\n", "Action 1: search(\"Adam Clayton Powell\")\n", "Observation 1: Could not find [\"Adam Clayton Powell\"]. Similar: ['Adam Clayton Powell III', 'Seventh Avenue (Manhattan)', 'Adam Clayton Powell Jr. State Office Building', 'Isabel Washington Powell', 'Adam Powell', 'Adam Clayton Powell (film)', 'Giancarlo Esposito'].\n", @@ -296,7 +311,7 @@ "The film is about the rise and fall of influential African-American politician Adam Clayton Powell Jr.[3][4] It was later aired as part of the PBS series The American Experience.\n", "Thought 3: Adam Clayton Powell (film) is a documentary about an African-American politician, not Finnish rock groups. So the documentary about Finnish rock groups must instead be The Saimaa Gesture.\n", "Action 3: finish(\"The Saimaa Gesture\")\"\"\",\n", - "\"\"\"Question: What profession does Nicholas Ray and Elia Kazan have in common?\n", + " \"\"\"Question: What profession does Nicholas Ray and Elia Kazan have in common?\n", "Thought 1: I need to search Nicholas Ray and Elia Kazan, find their professions, then find the profession they have in common.\n", "Action 1: search(\"Nicholas Ray\")\n", "Observation 1: Nicholas Ray (born Raymond Nicholas Kienzle Jr., August 7, 1911 – June 16, 1979) was an American film director, screenwriter, and actor best known for the 1955 film Rebel Without a Cause.\n", @@ -305,7 +320,7 @@ "Observation 2: Elia Kazan was an American film and theatre director, producer, screenwriter and actor.\n", "Thought 3: Professions of Elia Kazan are director, producer, screenwriter, and actor. 
So profession Nicholas Ray and Elia Kazan have in common is director, screenwriter, and actor.\n", "Action 3: finish(\"director, screenwriter, actor\")\"\"\",\n", - "\"\"\"Question: Which magazine was started first Arthur's Magazine or First for Women?\n", + " \"\"\"Question: Which magazine was started first Arthur's Magazine or First for Women?\n", "Thought 1: I need to search Arthur's Magazine and First for Women, and find which was started first.\n", "Action 1: search(\"Arthur's Magazine\")\n", "Observation 1: Arthur's Magazine (1844-€“1846) was an American literary periodical published in Philadelphia in the 19th century. \n", @@ -314,7 +329,7 @@ "Observation 2: First for Women is a woman's magazine published by Bauer Media Group in the USA.[1] The magazine was started in 1989. \n", "Thought 3: First for Women was started in 1989. 1844 (Arthur's Magazine) < 1989 (First for Women), so Arthur's Magazine was started first.\n", "Action 3: finish(\"Arthur's Magazine\")\"\"\",\n", - "\"\"\"Question: Were Pavel Urysohn and Leonid Levin known for the same type of work?\n", + " \"\"\"Question: Were Pavel Urysohn and Leonid Levin known for the same type of work?\n", "Thought 1: I need to search Pavel Urysohn and Leonid Levin, find their types of work, then find if they are the same.\n", "Action 1: search(\"Pavel Urysohn\")\n", "Observation 1: Pavel Samuilovich Urysohn (February 3, 1898 – August 17, 1924) was a Soviet mathematician who is best known for his contributions in dimension theory.\n", @@ -322,7 +337,7 @@ "Action 2: search(\"Leonid Levin\")\n", "Observation 2: Leonid Anatolievich Levin is a Soviet-American mathematician and computer scientist. \n", "Thought 3: Leonid Levin is a mathematician and computer scientist. So Pavel Urysohn and Leonid Levin have the same type of work. 
\n", - "Action 3: finish(\"yes\")\"\"\"\n", + "Action 3: finish(\"yes\")\"\"\",\n", "]" ] }, @@ -334,7 +349,7 @@ "source": [ "# preset up the examples as prompt_kwargs, the examples will be included in the system prompt\n", "\n", - "preset_prompt_kwargs = {\"examples\": examples} " + "preset_prompt_kwargs = {\"examples\": examples}" ] }, { @@ -353,8 +368,8 @@ "outputs": [], "source": [ "gpt_model_kwargs = {\n", - " \"model\": \"gpt-3.5-turbo\",\n", - " \"temperature\": 0.0,\n", + " \"model\": \"gpt-3.5-turbo\",\n", + " \"temperature\": 0.0,\n", "}" ] }, @@ -517,8 +532,11 @@ "# max_steps refers to how many thought-action round we allow the model to perform\n", "# to save resources, let's use 3 here\n", "agent = ReActAgent(\n", - " tools=tools, max_steps=3, model_client=OpenAIClient(),\n", - " model_kwargs=gpt_model_kwargs, preset_prompt_kwargs=preset_prompt_kwargs\n", + " tools=tools,\n", + " max_steps=3,\n", + " model_client=OpenAIClient(),\n", + " model_kwargs=gpt_model_kwargs,\n", + " preset_prompt_kwargs=preset_prompt_kwargs,\n", ")\n", "agent" ] @@ -542,7 +560,8 @@ "source": [ "import importlib\n", "import adalflow\n", - "importlib.reload(adalflow)\n" + "\n", + "importlib.reload(adalflow)" ] }, { @@ -681,13 +700,18 @@ "num_questions = 5\n", "for i in range(num_questions):\n", " question = val_dataset[i][\"question\"]\n", - " gt_answer = normalize_answer(val_dataset[i][\"answer\"]) # normalize the ground truth answer\n", - " \n", + " gt_answer = normalize_answer(\n", + " val_dataset[i][\"answer\"]\n", + " ) # normalize the ground truth answer\n", + "\n", " # get the agent's response\n", " pred_answer = agent(question)\n", " pred_answer = normalize_answer(pred_answer)\n", - " \n", - " printc(f\"question: {question}, ground truth: {gt_answer}, pred answer: {pred_answer}\", color=\"yellow\")\n" + "\n", + " printc(\n", + " f\"question: {question}, ground truth: {gt_answer}, pred answer: {pred_answer}\",\n", + " color=\"yellow\",\n", + " )" ] }, { @@ -995,8 +1019,11 @@ "FM_evaluator = AnswerMatchAcc(type=\"fuzzy_match\")\n", "\n", "agent = ReActAgent(\n", - " tools=tools, max_steps=7, model_client=OpenAIClient(),\n", - " model_kwargs=gpt_model_kwargs, preset_prompt_kwargs=preset_prompt_kwargs\n", + " tools=tools,\n", + " max_steps=7,\n", + " model_client=OpenAIClient(),\n", + " model_kwargs=gpt_model_kwargs,\n", + " preset_prompt_kwargs=preset_prompt_kwargs,\n", ")\n", "\n", "num_questions = 10\n", @@ -1005,18 +1032,23 @@ "start_time = time.time()\n", "for i in range(num_questions):\n", " question = val_dataset[i][\"question\"]\n", - " gt_answer = normalize_answer(val_dataset[i][\"answer\"]) # normalize the ground truth answer\n", + " gt_answer = normalize_answer(\n", + " val_dataset[i][\"answer\"]\n", + " ) # normalize the ground truth answer\n", " gt_answers.append(gt_answer)\n", - " \n", + "\n", " # get the agent's response\n", " pred_answer = agent(question)\n", " pred_answer = normalize_answer(pred_answer)\n", " pred_answers.append(pred_answer)\n", - " \n", - " printc(f\"No. {i+1}, question: {question}, ground truth: {gt_answer}, pred answer: {pred_answer}\", color=\"yellow\")\n", + "\n", + " printc(\n", + " f\"No. 
{i+1}, question: {question}, ground truth: {gt_answer}, pred answer: {pred_answer}\",\n", + " color=\"yellow\",\n", + " )\n", "\n", "end_time = time.time()\n", - " \n", + "\n", "em = EM_evaluator.compute(pred_answers=pred_answers, gt_answers=gt_answers)\n", "fm = FM_evaluator.compute(pred_answers=pred_answers, gt_answers=gt_answers)\n", "avg_time = (end_time - start_time) / num_questions\n", @@ -1262,8 +1294,7 @@ "FM_evaluator = AnswerMatchAcc(type=\"fuzzy_match\")\n", "\n", "agent = ReActAgent(\n", - " max_steps=7, model_client=OpenAIClient(),\n", - " model_kwargs=gpt_model_kwargs\n", + " max_steps=7, model_client=OpenAIClient(), model_kwargs=gpt_model_kwargs\n", ")\n", "\n", "num_questions = 10\n", @@ -1272,18 +1303,23 @@ "start_time = time.time()\n", "for i in range(num_questions):\n", " question = val_dataset[i][\"question\"]\n", - " gt_answer = normalize_answer(val_dataset[i][\"answer\"]) # normalize the ground truth answer\n", + " gt_answer = normalize_answer(\n", + " val_dataset[i][\"answer\"]\n", + " ) # normalize the ground truth answer\n", " gt_answers.append(gt_answer)\n", - " \n", + "\n", " # get the agent's response\n", " pred_answer = agent(question)\n", " pred_answer = normalize_answer(pred_answer)\n", " pred_answers.append(pred_answer)\n", - " \n", - " printc(f\"No. {i+1}, question: {question}, ground truth: {gt_answer}, pred answer: {pred_answer}\", color=\"yellow\")\n", + "\n", + " printc(\n", + " f\"No. {i+1}, question: {question}, ground truth: {gt_answer}, pred answer: {pred_answer}\",\n", + " color=\"yellow\",\n", + " )\n", "\n", "end_time = time.time()\n", - " \n", + "\n", "em = EM_evaluator.compute(pred_answers=pred_answers, gt_answers=gt_answers)\n", "fm = FM_evaluator.compute(pred_answers=pred_answers, gt_answers=gt_answers)\n", "avg_time = (end_time - start_time) / num_questions\n", diff --git a/use_cases/generator/intermediate.ipynb b/use_cases/generator/intermediate.ipynb index 80f8cff8..fb4dc8a8 100644 --- a/use_cases/generator/intermediate.ipynb +++ b/use_cases/generator/intermediate.ipynb @@ -30,7 +30,10 @@ "from adalflow.core import Component, Generator, Sequential\n", "from adalflow.components.model_client import OpenAIClient\n", "from adalflow.components.model_client import GroqAPIClient\n", - "from adalflow.utils import setup_env # make sure you have a .env file with OPENAI_API_KEY and GROQ_API_KEY\n", + "from adalflow.utils import (\n", + " setup_env,\n", + ") # make sure you have a .env file with OPENAI_API_KEY and GROQ_API_KEY\n", + "\n", "setup_env(\".env\")" ] }, @@ -72,6 +75,7 @@ "source": [ "# Let's turn on the library log to help with debugging.\n", "from adalflow.utils import get_logger\n", + "\n", "get_logger()" ] }, @@ -248,6 +252,8 @@ "# Router component\n", "\n", "from typing import Dict\n", + "\n", + "\n", "class Router(Component):\n", " def __init__(self, choices: Dict[str, str] = {}):\n", " super().__init__()\n", @@ -260,9 +266,9 @@ "\n", " def call(self, query: str) -> str:\n", " prompt_kwargs = {\"input_str\": query, \"choices\": self.choices}\n", - " choice = self.router(prompt_kwargs=prompt_kwargs).data\n", + " choice = self.router(prompt_kwargs=prompt_kwargs).data\n", " return {\"choice\": choice, \"query\": query}\n", - " \n", + "\n", " def _extra_repr(self):\n", " return f\"Choices: {self.choices}, \"" ] @@ -329,6 +335,7 @@ "source": [ "# the second chat component with two generators\n", "\n", + "\n", "class Chat(Component):\n", " def __init__(self):\n", " super().__init__()\n", @@ -342,6 +349,7 @@ " 
model_client=GroqAPIClient(),\n", " model_kwargs={\"model\": \"llama3-8b-8192\"},\n", " )\n", + "\n", " # to chain together just to make sure the output can be directly passed to the next as input\n", " def call(self, input: Dict[str, str]) -> Dict[str, str]:\n", " choice = input.get(\"choice\", None)\n", @@ -412,7 +420,9 @@ "class QAWithRouter(Component):\n", " def __init__(self):\n", " super().__init__()\n", - " self.router = Router(choices={\"doctor\": \"Doctor\", \"lawyer\": \"Lawyer\", \"other\": \"Other\"})\n", + " self.router = Router(\n", + " choices={\"doctor\": \"Doctor\", \"lawyer\": \"Lawyer\", \"other\": \"Other\"}\n", + " )\n", " self.chat = Chat()\n", " self.pipeline = Sequential(self.router, self.chat)\n", "\n", diff --git a/use_cases/question_answering/chatbot.ipynb b/use_cases/question_answering/chatbot.ipynb index 3db858a4..7ed71347 100644 --- a/use_cases/question_answering/chatbot.ipynb +++ b/use_cases/question_answering/chatbot.ipynb @@ -21,6 +21,7 @@ "outputs": [], "source": [ "from IPython.display import clear_output\n", + "\n", "!pip install -U adalflow[openai,groq,faiss-cpu]\n", "clear_output()" ] @@ -37,7 +38,9 @@ "from adalflow.core.component import Component\n", "from adalflow.core.generator import Generator\n", "from adalflow.components.memory.memory import Memory\n", - "from adalflow.components.model_client import OpenAIClient # Here, we use the OpenAIClient as an example, but you can use any other clients (with the corresponding API Key as needed), such as AnthropicAPIClient" + "from adalflow.components.model_client import (\n", + " OpenAIClient,\n", + ") # Here, we use the OpenAIClient as an example, but you can use any other clients (with the corresponding API Key as needed), such as AnthropicAPIClient" ] }, { @@ -49,7 +52,7 @@ "# Prompt user to enter their API keys securely\n", "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", "# Set environment variables\n", - "os.environ['OPENAI_API_KEY'] = openai_api_key\n", + "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n", "# Replace with your OpenAI API Key, or you can put it in a .env file" ] }, @@ -64,11 +67,10 @@ " def __init__(self):\n", " super().__init__()\n", " self.generator = Generator(\n", - " model_client=OpenAIClient(),\n", - " model_kwargs={'model': 'gpt-4o-mini'}\n", + " model_client=OpenAIClient(), model_kwargs={\"model\": \"gpt-4o-mini\"}\n", " )\n", - " self.chat_history = Memory() # Memory to store the chat history\n", - " \n", + " self.chat_history = Memory() # Memory to store the chat history\n", + "\n", " def call(self) -> str:\n", " print(\"Welcome to the ChatBot. Type anything to chat. 
Type 'exit' to end.\")\n", " while True:\n", @@ -90,6 +92,7 @@ " )\n", " print(f\"ChatBot: {response}\")\n", "\n", + "\n", "chatbot = ChatBot()\n", "print(chatbot)" ] diff --git a/use_cases/question_answering/simple_qa.ipynb b/use_cases/question_answering/simple_qa.ipynb index 67dc9b04..cec10d76 100644 --- a/use_cases/question_answering/simple_qa.ipynb +++ b/use_cases/question_answering/simple_qa.ipynb @@ -32,7 +32,10 @@ "outputs": [], "source": [ "# Here, we use the OpenAIClient as an example, but you can use any other clients (with the corresponding API Key as needed), such as AnthropicAPIClient\n", - "from adalflow.utils import setup_env # make sure you have a .env file with OPENAI_API_KEY or any other key mentioned with respect to your usage\n", + "from adalflow.utils import (\n", + " setup_env,\n", + ") # make sure you have a .env file with OPENAI_API_KEY or any other key mentioned with respect to your usage\n", + "\n", "setup_env(\".env\")\n", "from adalflow.components.model_client import OpenAIClient" ] @@ -119,12 +122,12 @@ " def __init__(self):\n", " super().__init__()\n", " self.generator = Generator(\n", - " model_client=OpenAIClient(),\n", - " model_kwargs={'model': 'gpt-3.5-turbo'}\n", + " model_client=OpenAIClient(), model_kwargs={\"model\": \"gpt-3.5-turbo\"}\n", " )\n", "\n", " def call(self, query: str):\n", - " return self.generator.call(prompt_kwargs={'input_str': query})\n", + " return self.generator.call(prompt_kwargs={\"input_str\": query})\n", + "\n", "\n", "simple_qa = SimpleQA()\n", "print(simple_qa)"
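# A minimal usage sketch, assuming the SimpleQA cell above has been executed and
# OPENAI_API_KEY is loaded via setup_env(".env"); the query string is illustrative.
response = simple_qa("What is the capital of France?")
# Generator.call returns a GeneratorOutput; its .data field holds the parsed answer text.
print(response.data)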