diff --git a/coding/containers/biosandbox/env.yaml b/coding/containers/biosandbox/env.yaml index 28edb62..5e73fc2 100644 --- a/coding/containers/biosandbox/env.yaml +++ b/coding/containers/biosandbox/env.yaml @@ -19,4 +19,6 @@ dependencies: - genomepy>=0.16.1 - pyensembl - plotly - - GEOparse>=2.0.4 \ No newline at end of file + - GEOparse>=2.0.4 + - pybiomart + - scanpy \ No newline at end of file diff --git a/coding/just_agents/coding/tools.py b/coding/just_agents/coding/tools.py index 087e14d..0c69298 100644 --- a/coding/just_agents/coding/tools.py +++ b/coding/just_agents/coding/tools.py @@ -8,7 +8,33 @@ def run_bash_command(command: str): with MicromambaSession(image="ghcr.io/longevity-genie/just-agents/biosandbox:main", lang="python", keep_template=True, verbose=True) as session: result: ConsoleOutput = session.execute_command(command=command) return result - + +def validate_python_code_syntax(code: str, filename: str)-> str: + """ + code: str # python code to validate + filename: str # a filename to use in error messages + """ + try: + # Compile the code string to check for syntax errors + compiled_code = compile(code, f"/example/{filename}", "exec") + return ("Code syntax is correct") + except SyntaxError as e: + return (f"Syntax error in code: {e}") + +def save_text_to_runtime(text: str, filename: str): + """ + text: str # ptext to be saved + filename: str # a filename to use i + """ + with MicromambaSession(image="ghcr.io/longevity-genie/just-agents/biosandbox:main", lang="python", keep_template=True, verbose=True) as session: + + text_file = f"/tmp/{filename}" + dest_file = f"/example/{filename}" + with open(text_file, "w") as f: + f.write(text) + result: ConsoleOutput = session.copy_to_runtime(src=text_file, dest=dest_file) + return result + def run_python_code(code: str): """ diff --git a/core/just_agents/base_agent.py b/core/just_agents/base_agent.py index d62de49..c38a629 100644 --- a/core/just_agents/base_agent.py +++ b/core/just_agents/base_agent.py @@ -1,18 +1,16 @@ from pydantic import Field, PrivateAttr from typing import Optional, List, Union, Any, Generator - -from just_agents.core.interfaces.IMemory import IMemory -from just_agents.core.types import Role, AbstractMessage, SupportedMessages, SupportedMessage +from just_agents.types import Role, SupportedMessages from just_agents.llm_options import LLMOptions -from just_agents.streaming.protocols.interfaces.IFunctionCall import IFunctionCall -from just_agents.streaming.protocols.interfaces.IProtocolAdapter import IProtocolAdapter, BaseModelResponse -from just_agents.core.interfaces.IAgent import IAgentWithInterceptors, QueryListener, ResponseListener +from just_agents.interfaces.function_call import IFunctionCall +from just_agents.interfaces.protocol_adapter import IProtocolAdapter, BaseModelResponse +from just_agents.interfaces.agent import IAgentWithInterceptors, QueryListener, ResponseListener from just_agents.base_memory import IBaseMemory, BaseMemory from just_agents.just_profile import JustAgentProfile -from just_agents.core.rotate_keys import RotateKeys -from just_agents.streaming.protocol_factory import StreamingMode, ProtocolAdapterFactory +from just_agents.rotate_keys import RotateKeys +from just_agents.protocols.protocol_factory import StreamingMode, ProtocolAdapterFactory from litellm.litellm_core_utils.get_supported_openai_params import get_supported_openai_params @@ -64,10 +62,16 @@ class BaseAgent( key_list_path: Optional[str] = Field( default=None, exclude=True, - description="path to text file with 
list of api keys, one key per line") + description="Path to text file with list of api keys, one key per line") + + max_tool_calls: int = Field( + ge=1, + default=50, + description="A safeguard to prevent tool calls stuck in a loop") + drop_params: bool = Field( default=True, - description=" drop params from the request, useful for some models that do not support them") + description="Drop params from the request, useful for some models that do not support them") # Protected handlers implementation _on_query : List[QueryListener] = PrivateAttr(default_factory=list) @@ -78,7 +82,7 @@ class BaseAgent( _partial_streaming_chunks: List[BaseModelResponse] = PrivateAttr( default_factory=list) # Buffers streaming responses _key_getter: Optional[RotateKeys] = PrivateAttr(None) # Manages API key rotation - + _tool_fuse_broken: bool = PrivateAttr(False) #Fuse to prevent tool loops def instruct(self, prompt: str): #backward compatibility self.memory.add_message({"role": Role.system, "content": prompt}) @@ -87,7 +91,7 @@ def clear_memory(self) -> None: self.memory.clear_messages() self.instruct(self.system_prompt) - def deepcopy_memory(self) -> IMemory: + def deepcopy_memory(self) -> IBaseMemory: return self.memory.deepcopy() def add_to_memory(self, messages: SupportedMessages) -> None: @@ -129,7 +133,7 @@ def model_post_init(self, __context: Any) -> None: def _prepare_options(self, options: LLMOptions): opt = options.copy() - if self.tools is not None: # populate llm_options based on available tools + if self.tools is not None and not self._tool_fuse_broken: # populate llm_options based on available tools opt["tools"] = [{"type": "function", "function": self.tools[tool].get_litellm_description()} for tool in self.tools] return opt @@ -138,7 +142,7 @@ def _execute_completion( self, stream: bool, **kwargs - ) -> Union[AbstractMessage, BaseModelResponse]: + ) -> Union[SupportedMessages, BaseModelResponse]: opt = self._prepare_options(self.llm_options) opt.update(kwargs) @@ -172,7 +176,7 @@ def _execute_completion( return self._protocol.completion(messages=self.memory.messages, stream=stream, **opt) - def _process_function_calls(self, function_calls: List[IFunctionCall[AbstractMessage]]) -> SupportedMessages: + def _process_function_calls(self, function_calls: List[IFunctionCall[SupportedMessages]]) -> SupportedMessages: messages: SupportedMessages = [] for call in function_calls: msg = call.execute_function(lambda function_name: self.tools[function_name].get_callable()) @@ -182,39 +186,44 @@ def _process_function_calls(self, function_calls: List[IFunctionCall[AbstractMes return messages def query_with_current_memory(self, **kwargs): #former proceed() aka llm_think() - while True: + for step in range(self.max_tool_calls): # individual llm call, unpacking the message, processing handlers response = self._execute_completion(stream=False, **kwargs) - msg: AbstractMessage = self._protocol.message_from_response(response) # type: ignore + msg: SupportedMessage = self._protocol.message_from_response(response) # type: ignore self.handle_on_response(msg) self.add_to_memory(msg) - if not self.tools: + if not self.tools or self._tool_fuse_broken: + self._tool_fuse_broken = False break # If there are no tool calls or tools available, exit the loop tool_calls = self._protocol.tool_calls_from_message(msg) + # Process each tool call if they exist and re-execute query + self._process_function_calls( + tool_calls) # NOTE: no kwargs here as tool calls might need different parameters + if not tool_calls: break - # Process 
each tool call if they exist and re-execute query - self._process_function_calls(tool_calls) #NOTE: no kwargs here as tool calls might need different parameters + elif step == self.max_tool_calls - 2: #special case where we ran out of tool calls or stuck in a loop + self._tool_fuse_broken = True #one last attempt at graceful response def streaming_query_with_current_memory(self, reconstruct_chunks = False, **kwargs): try: self._partial_streaming_chunks.clear() - while True: #TODO rewrite this super-ugly while-True-break stuff into proper recursion + for step in range(self.max_tool_calls): response = self._execute_completion(stream=True, **kwargs) - tool_messages: list[AbstractMessage] = [] + tool_messages: list[SupportedMessages] = [] for i, part in enumerate(response): self._partial_streaming_chunks.append(part) - msg: AbstractMessage = self._protocol.message_from_delta(response) # type: ignore + msg: SupportedMessage = self._protocol.message_from_delta(response) # type: ignore delta = self._protocol.content_from_delta(msg) if delta: if reconstruct_chunks: yield self._protocol.get_chunk(i, delta, options={'model': part["model"]}) else: yield response - if self.tools: + if self.tools and not self._tool_fuse_broken: tool_calls = self._protocol.tool_calls_from_message(msg) if tool_calls: self.add_to_memory( @@ -222,14 +231,18 @@ def streaming_query_with_current_memory(self, reconstruct_chunks = False, **kwar ) self._process_function_calls(tool_calls) tool_messages.append(self._process_function_calls(tool_calls)) + if not tool_messages: break + elif step == self.max_tool_calls - 2: # special case where we ran out of tool calls or stuck in a loop + self._tool_fuse_broken = True # one last attempt at graceful response finally: + self._tool_fuse_broken = False #defuse yield self._protocol.done() if len(self._partial_streaming_chunks) > 0: response = self._protocol.response_from_deltas(self._partial_streaming_chunks) - msg: AbstractMessage = self._protocol.message_from_response(response) # type: ignore + msg: SupportedMessage = self._protocol.message_from_response(response) # type: ignore self.handle_on_response(msg) self.add_to_memory(msg) self._partial_streaming_chunks.clear() @@ -248,7 +261,7 @@ def query(self, query_input: SupportedMessages, **kwargs) -> str: return result - def stream(self, query_input: SupportedMessages, reconstruct_chunks = False, **kwargs) -> Generator[Union[BaseModelResponse, AbstractMessage],None,None]: + def stream(self, query_input: SupportedMessages, reconstruct_chunks = False, **kwargs) -> Generator[Union[BaseModelResponse, SupportedMessages],None,None]: self.handle_on_query(query_input) self.add_to_memory(query_input) return self.streaming_query_with_current_memory(reconstruct_chunks=reconstruct_chunks, **kwargs) diff --git a/core/just_agents/base_memory.py b/core/just_agents/base_memory.py index f925bfa..84e0fe2 100644 --- a/core/just_agents/base_memory.py +++ b/core/just_agents/base_memory.py @@ -1,20 +1,20 @@ from pydantic import BaseModel, Field, PrivateAttr -from typing import Optional, Callable, List, Dict, Union +from typing import Optional, Callable, List, Dict from functools import singledispatchmethod -from just_agents.core.interfaces.IMemory import IMemory -from just_agents.core.types import Role, AbstractMessage, SupportedMessages, SupportedMessage +from just_agents.interfaces.memory import IMemory +from just_agents.types import Role, MessageDict, SupportedMessages from litellm.types.utils import Function from abc import ABC -OnMessageCallable = 
Callable[[AbstractMessage], None] +OnMessageCallable = Callable[[MessageDict], None] OnFunctionCallable = Callable[[Function], None] -class IBaseMemory(BaseModel, IMemory[Role, AbstractMessage], ABC): +class IBaseMemory(BaseModel, IMemory[Role, MessageDict], ABC): """ Abstract Base Class to fulfill Pydantic schema requirements for concrete-attributes. """ - messages: List[AbstractMessage] = Field(default_factory=list, validation_alias='conversation') + messages: List[MessageDict] = Field(default_factory=list, validation_alias='conversation') # Private dict of message handlers for each role _on_message: Dict[Role, List[OnMessageCallable]] = PrivateAttr(default_factory=lambda: { @@ -24,6 +24,9 @@ class IBaseMemory(BaseModel, IMemory[Role, AbstractMessage], ABC): Role.system: [], }) + def deepcopy(self) -> 'IBaseMemory': + return self.model_copy(deep=True) + class BaseMemory(IBaseMemory): """ The Memory class provides storage and handling of messages for a language model session. @@ -31,7 +34,7 @@ class BaseMemory(IBaseMemory): function calls categorized by roles: assistant, tool, user, and system. """ - def handle_message(self, message: AbstractMessage) -> None: + def handle_message(self, message: MessageDict) -> None: """ Implements the abstract method to handle messages based on their roles. """ @@ -92,7 +95,7 @@ def add_on_tool_call(self, fun: OnFunctionCallable) -> None: Adds a handler to track function calls. """ - def tool_handler(message: AbstractMessage) -> None: + def tool_handler(message: MessageDict) -> None: tool_calls = message.get('tool_calls', []) for call in tool_calls: function_name = call.get('function') @@ -169,7 +172,3 @@ def remove_on_system_message(self, handler: OnMessageCallable) -> None: """ self._remove_on_message(handler, Role.system) - def deepcopy(self) -> 'BaseMemory': - return self.model_copy(deep=True) - - diff --git a/core/just_agents/config/agent_profiles.yaml b/core/just_agents/config/agent_profiles.yaml deleted file mode 100644 index 85ae0ae..0000000 --- a/core/just_agents/config/agent_profiles.yaml +++ /dev/null @@ -1,41 +0,0 @@ -agent_profiles: - BaseAgentSchemaExample: - class_qualname: just_agents.base_agent.BaseAgent - completion_max_tries: 2 - completion_remove_key_on_error: true - description: Generic all-purpose AI agent - drop_params: true - on_response: [] - streaming_method: openai - system_prompt: You are a helpful AI assistant - SecretaryAgent: - autoload_from_yaml: false - backstory: Developed to streamline the process of understanding and documenting - AI agents. - class_qualname: just_agents.router.secretary_agent.SecretaryAgent - description: A skilled AI assistant specializing in generating detailed profiles - for AI agents. - expertise_domain: AI agent profiling and analysis. - extra_dict: - personality_traits: Agent's personality traits go here - goal: To assist in the analysis and description of AI agents. - knowledge_sources: [] - limitations: Limited to the information available up to October 2023; cannot create - new data. - llm_options: - model: gpt-4o-mini - temperature: 0.0 - model_name: gpt-4o-mini - personality_traits: Skilled, detail-oriented, concise, informative, analytical - role: AI assistant. - system_prompt: "\n You are a skilled AI assistant specializing in analysis\ - \ and description of AI agents. \n You are tasked with generation of a minimalistic\ - \ and concise yet detail-rich profile for an AI agent, based on the AVAILABLE_ATTRIBUTES,\ - \ \n including 'system_prompt', 'llm_options' and any other. 
Your task is\ - \ to fill in values of a JSON-formatted profile \n that matches the PROFILE_UPDATE_TEMPLATE\ - \ provided below. Values of the template describe what output is expected for\ - \ each field. \n Only populate fields based on the well-established information,\ - \ don't make up anything. \n Double-check that the output contains only a\ - \ valid JSON with all the fields specified in PROFILE_UPDATE_TEMPLATE. \n \ - \ Never include any additional text or explanations in your reply.\n " - task: Generating concise and informative profiles based on available attributes. diff --git a/core/just_agents/core/types.py b/core/just_agents/core/types.py deleted file mode 100644 index 532d0c6..0000000 --- a/core/just_agents/core/types.py +++ /dev/null @@ -1,68 +0,0 @@ -from enum import Enum - - -from typing import Type, TypeVar, Any, List, Union, Optional, Literal, cast, TypeAlias, Sequence, Callable, Dict - -from openai.types import CompletionUsage -from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam, ChatCompletionAssistantMessageParam, ChatCompletionToolMessageParam,ChatCompletionFunctionMessageParam -from openai.types.chat.chat_completion import ChatCompletion, Choice, ChatCompletionMessage -from pydantic import BaseModel, Field, HttpUrl - - -######### Common ########### -AbstractMessage = Dict[str, Any] -SupportedMessage = Union[str, AbstractMessage] -SupportedMessages = Union[SupportedMessage, List[SupportedMessage]] -Output = TypeVar('Output') - -class Role(str, Enum): - system = "system" - user = "user" - assistant = "assistant" - tool = "tool" - # make it similar to Literal["system", "user", "assistant", tool] while retaining enum convenience - - def __new__(cls, value, *args, **kwargs): - obj = str.__new__(cls, value) - obj._value_ = value - return obj - - def __str__(self): - return str(self.value) - -######### Helper ########### - -class ModelOptions(BaseModel): - model: str = Field( - ..., - examples=["gpt-4o-mini"], - description="LLM model name" - ) - temperature: Optional[float] = Field( - 0.0, - ge=0.0, - le=2.0, - examples=[0.7], - description="Sampling temperature, values from 0.0 to 2.0" - ) - top_p: Optional[float] = Field( - 1.0, - ge=0.0, - le=1.0, - examples=[0.9], - description="Nucleus sampling probability, values from 0.0 to 1.0" - ) - presence_penalty: Optional[float] = Field( - 0.0, - ge=-2.0, - le=2.0, - examples=[0.6], - description="Presence penalty, values from -2.0 to 2.0" - ) - frequency_penalty: Optional[float] = Field( - 0.0, - ge=-2.0, - le=2.0, - examples=[0.5], - description="Frequency penalty, values from -2.0 to 2.0" - ) \ No newline at end of file diff --git a/core/just_agents/config/__init__.py b/core/just_agents/interfaces/__init__.py similarity index 100% rename from core/just_agents/config/__init__.py rename to core/just_agents/interfaces/__init__.py diff --git a/core/just_agents/core/interfaces/IAgent.py b/core/just_agents/interfaces/agent.py similarity index 100% rename from core/just_agents/core/interfaces/IAgent.py rename to core/just_agents/interfaces/agent.py diff --git a/core/just_agents/streaming/protocols/interfaces/IFunctionCall.py b/core/just_agents/interfaces/function_call.py similarity index 100% rename from core/just_agents/streaming/protocols/interfaces/IFunctionCall.py rename to core/just_agents/interfaces/function_call.py diff --git a/core/just_agents/core/interfaces/IMemory.py b/core/just_agents/interfaces/memory.py similarity index 100% rename from 
core/just_agents/core/interfaces/IMemory.py rename to core/just_agents/interfaces/memory.py diff --git a/core/just_agents/streaming/protocols/interfaces/IProtocolAdapter.py b/core/just_agents/interfaces/protocol_adapter.py similarity index 87% rename from core/just_agents/streaming/protocols/interfaces/IProtocolAdapter.py rename to core/just_agents/interfaces/protocol_adapter.py index b92a848..2acea53 100644 --- a/core/just_agents/streaming/protocols/interfaces/IProtocolAdapter.py +++ b/core/just_agents/interfaces/protocol_adapter.py @@ -1,8 +1,8 @@ from abc import ABC, abstractmethod from pydantic import BaseModel from typing import Callable, Coroutine, Union, AsyncGenerator, List, Sequence, ClassVar, Type, TypeVar, Generic, Any -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol -from just_agents.streaming.protocols.interfaces.IFunctionCall import IFunctionCall +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol +from just_agents.interfaces.function_call import IFunctionCall BaseModelResponse = TypeVar('BaseModelResponse', bound=BaseModel) AbstractMessage = TypeVar("AbstractMessage") @@ -11,12 +11,12 @@ MessageUnpackCallback=Callable[[BaseModelResponse], AbstractMessage] ExecuteToolCallback=Callable[[Sequence[IFunctionCall]],List[AbstractMessage]] -class IProtocolAdapter(AbstractStreamingProtocol, ABC, Generic[BaseModelResponse, AbstractMessage]): +class IProtocolAdapter(IAbstractStreamingProtocol, ABC, Generic[BaseModelResponse, AbstractMessage]): """ Class that is required to wrap the model protocol """ function_convention: ClassVar[Type[IFunctionCall[Any]]] - _output_streaming: AbstractStreamingProtocol + _output_streaming: IAbstractStreamingProtocol execute_function_hook: ExecuteToolCallback[AbstractMessage] @abstractmethod diff --git a/core/just_agents/streaming/protocols/abstract_protocol.py b/core/just_agents/interfaces/streaming_protocol.py similarity index 89% rename from core/just_agents/streaming/protocols/abstract_protocol.py rename to core/just_agents/interfaces/streaming_protocol.py index 782230f..06beb67 100644 --- a/core/just_agents/streaming/protocols/abstract_protocol.py +++ b/core/just_agents/interfaces/streaming_protocol.py @@ -1,7 +1,7 @@ from typing import Any from abc import ABC, abstractmethod -class AbstractStreamingProtocol(ABC): +class IAbstractStreamingProtocol(ABC): @abstractmethod def get_chunk(self, index:int, delta:str, options:dict) -> Any: raise NotImplementedError("You need to implement get_chunk() first!") diff --git a/core/just_agents/just_bus.py b/core/just_agents/just_bus.py new file mode 100644 index 0000000..0511129 --- /dev/null +++ b/core/just_agents/just_bus.py @@ -0,0 +1,66 @@ +from typing import Callable, Dict, List + +class SingletonMeta(type): + """ + A metaclass that creates a Singleton base type when called. + """ + _instances: Dict[type, object] = {} + + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(SingletonMeta, cls).__call__(*args, **kwargs) + return cls._instances[cls] + +SubscriberCallback = Callable[[...],None] + +class JustEventBus(metaclass=SingletonMeta): + """ + A simple singleton event bus to publish function call results for functions that support it + Event name can be anything, but suggested use is function names. 
+ """ + + _subscribers: Dict[str, List[SubscriberCallback]] + + def __init__(self): + # Dictionary of subscription_pattern -> list_of_callbacks (in subscription order) + self._subscribers = {} + + def subscribe(self, event_prefix: str, callback: SubscriberCallback): + """Subscribe a callback to a specific event.""" + """ + Subscribe to an event name or prefix: + e.g. 'mytool.call' or 'mytool.*' + """ + if event_prefix not in self._subscribers: + self._subscribers[event_prefix] = [] + self._subscribers[event_prefix].append(callback) + + def publish(self, event_name: str, *args, **kwargs): + """ + Publish an event to: + 1) Any exact-match subscriber (same string) + 2) Any prefix subscriber (ends with '.*' and event_name.startswith(prefix)) + """ + # (1) Get exact-match subscribers + exact_callbacks = self._subscribers.get(event_name, []) + + # (2) Find prefix-matching subscribers + prefix_callbacks : List[SubscriberCallback]= [] + for pattern, callbacks in self._subscribers.items(): + if pattern.endswith('.*'): + prefix = pattern[:-2] + if event_name.startswith(prefix): + prefix_callbacks.extend(callbacks) + + ## Invoke the callbacks with deduplication ( preserve order of subscription) + seen = set() + for cb in exact_callbacks + prefix_callbacks: + if cb not in seen: + cb(event_name, *args, **kwargs) + seen.add(cb) + + + + + + diff --git a/core/just_agents/just_profile.py b/core/just_agents/just_profile.py index de48371..305988d 100644 --- a/core/just_agents/just_profile.py +++ b/core/just_agents/just_profile.py @@ -1,8 +1,9 @@ from pathlib import Path from pydantic import Field, model_validator -from typing import Optional, List, ClassVar, Tuple, Sequence, Callable, Dict -from just_agents.core.just_serialization import JustSerializable -from just_agents.core.just_tool import JustTool, JustTools +from typing import Optional, List, ClassVar, Tuple, Sequence, Callable, Dict, Union, Type + +from just_agents.just_serialization import JustSerializable +from just_agents.just_tool import JustTool, JustTools class JustAgentProfile(JustSerializable): @@ -114,11 +115,11 @@ def load_legacy_schema( def convert_from_legacy( legacy_file_path: Path, output_file_path: Optional[Path] = None, - class_hint: str = "just_agents.base_agent.BaseAgent", + class_hint: Optional[Union[Type|str]] = 'just_agents.base_agent.BaseAgent', section_name: Optional[str] = None, parent_section: Optional[str] = DEFAULT_PARENT_SECTION, - exclude_defaults: bool = True, - exclude_unset: bool = True + exclude_defaults: bool = False, + exclude_unset: bool = False ) -> 'JustAgentProfile': """ Converts a legacy agent schema file to the new format and saves it. @@ -129,6 +130,8 @@ def convert_from_legacy( section_name (str): Name of the section to save the converted agent under parent_section (str): Parent section name to save the converted agent under class_hint (str): A substitute class to use if not specified by legacy schema + exclude_defaults (bool): Whether to exclude fields with the default values from the output. + exclude_unset (bool): Whether to exclude unset fields from the output. 
Returns: JustAgentProfile: The converted agent profile instance @@ -138,7 +141,6 @@ def convert_from_legacy( file_path=legacy_file_path, class_hint=class_hint ) - file_path = output_file_path or legacy_file_path # Save in new format diff --git a/core/just_agents/core/just_serialization.py b/core/just_agents/just_serialization.py similarity index 94% rename from core/just_agents/core/just_serialization.py rename to core/just_agents/just_serialization.py index 2e91172..4942389 100644 --- a/core/just_agents/core/just_serialization.py +++ b/core/just_agents/just_serialization.py @@ -1,6 +1,6 @@ import yaml import importlib -from typing import Optional, Dict, Any, ClassVar, Sequence, Union, Set +from typing import Optional, Dict, Any, ClassVar, Sequence, Union, Set, Type from pathlib import Path from pydantic import BaseModel, Field, field_validator, ValidationError from collections.abc import MutableMapping, MutableSequence @@ -18,6 +18,19 @@ class JustYaml: Updates a section within a YAML file with new data. """ + @staticmethod + def str_presenter(dumper, data): + """ + An override to write multiline strings (like prompts!) to yaml in scalar form that removes trailing spaces + see https://stackoverflow.com/questions/8640959/how-can-i-control-what-scalar-form-pyyaml-uses-for-my-data + pyyaml bug: https://github.com/yaml/pyyaml/issues/121 + """ + if len(data.splitlines()) > 1 or '\n' in data: # check for multiline string + text_list = [line.rstrip() for line in data.splitlines()] + fixed_data = "\n".join(text_list) + return dumper.represent_scalar('tag:yaml.org,2002:str', fixed_data, style='|') + return dumper.represent_scalar('tag:yaml.org,2002:str', data) + @staticmethod def read_yaml_data( file_path: Path, @@ -40,7 +53,7 @@ def read_yaml_data( """ if file_path.exists(): with file_path.open('r') as f: - data = yaml.safe_load(f) or {} + data = yaml.load(f, Loader=yaml.FullLoader) or {} else: raise FileNotFoundError( f"File '{file_path}' not found." @@ -51,10 +64,10 @@ def read_yaml_data( return data[parent_section][section_name] else: return data[section_name] - except KeyError: - raise ValueError( + except KeyError as e: + raise KeyError( f"Section '{section_name}' under parent section '{parent_section}' not found in '{file_path}'" - ) + ) from e @staticmethod def read_yaml_data_safe( @@ -136,8 +149,14 @@ def save_to_yaml( # Write the updated data back to the YAML file with file_path.open('w') as f: yaml.safe_dump(data, f) + #yaml.safe_dump(data, f) +# configure YAML to use fixed representer: +yaml.add_representer(str, JustYaml.str_presenter) +# to use with safe_dump: +yaml.representer.SafeRepresenter.add_representer(str, JustYaml.str_presenter) + class JustSerializable(BaseModel, extra="allow", use_enum_values=True, validate_assignment=True, populate_by_name=True): """ Pydantic2 wrapper class that implements semi-automated YAML and JSON serialization and deserialization @@ -320,7 +339,7 @@ def from_yaml(cls, section_name: str, def from_yaml_auto(section_name: str, parent_section: Optional[str], file_path: Path, - class_hint: Optional[str] = None, + class_hint: Optional[Union[Type|str]] = None, ) -> Any: """ Creates an instance from a YAML file. @@ -339,6 +358,8 @@ def from_yaml_auto(section_name: str, Any: An instance of the dynamically imported class if `class_qualname` is found in the configuration data; otherwise, returns None. 
""" + if isinstance(class_hint, type): + class_hint : str = f"{class_hint.__module__}.{class_hint.__qualname__}" config_data = JustYaml.read_yaml_data_safe( file_path, section_name, @@ -346,7 +367,6 @@ def from_yaml_auto(section_name: str, ) if config_data is None: return None - instance = None config_data = JustSerializable.update_config_data(config_data, section_name, parent_section, file_path, class_hint=class_hint) class_qualname = config_data.get("class_qualname") if class_qualname: @@ -357,10 +377,9 @@ def from_yaml_auto(section_name: str, cls = getattr(module, class_name) # Dynamic instantiation of `Child class` or whatever class is specified instance = cls.from_json(config_data) - except Exception as e: - raise ValueError(f"Exception occurred: {str(e)}") - finally: return instance + except Exception as e: + raise ValueError(f"Exception occurred: {str(e)}") from e else: return None @@ -388,7 +407,7 @@ def to_json( exclude_none (bool): Whether to exclude fields with None values from the output. serialize_as_any (bool): Whether to serialize values by their types. exclude_defaults (bool): Whether to exclude fields with the default values from the output. - exclude_unset (bool): Whether to exclude unset fields from the output. + exclude_unset (bool): Whether to exclude fields that were not explicitly set during instance creation from the output. Returns: Dict[str, Any]: A dictionary representation of the instance, including extra fields. @@ -400,8 +419,8 @@ def to_json( include=include, exclude=exclude, serialize_as_any=serialize_as_any, - exclude_defaults=exclude_defaults, - exclude_unset=exclude_unset + exclude_defaults=exclude_defaults, + exclude_unset=exclude_unset, ) # Flatten Extras if include_extras and self.extras: @@ -474,7 +493,7 @@ def save_to_yaml( exclude_none: bool = True, serialize_as_any: bool = True, exclude_defaults: bool = True, - exclude_unset: bool = True + exclude_unset: bool = False ): """ Saves the instance's data to a YAML file under the specified parent section and section name (shortname). diff --git a/core/just_agents/core/just_tool.py b/core/just_agents/just_tool.py similarity index 77% rename from core/just_agents/core/just_tool.py rename to core/just_agents/just_tool.py index 2fcc3ce..b9b3522 100644 --- a/core/just_agents/core/just_tool.py +++ b/core/just_agents/just_tool.py @@ -2,12 +2,21 @@ from litellm.utils import function_to_dict from pydantic import BaseModel, Field, PrivateAttr -import importlib +from just_bus import JustEventBus +from importlib import import_module import inspect FunctionParamFields=Literal["kind","default","type_annotation"] FunctionParams = List[Dict[str, Dict[FunctionParamFields,Optional[str]]]] + +class JustToolsBus(JustEventBus): + """ + A simple singleton tools bus. + Inherits from JustEventBus with no additional changes. + """ + pass + class LiteLLMDescription(BaseModel, populate_by_name=True): name: Optional[str] = Field(..., alias='function', description="The name of the function") description: Optional[str] = Field(None, description="The docstring of the function.") @@ -17,7 +26,7 @@ class JustTool(LiteLLMDescription): package: str = Field(..., description="The name of the module where the function is located.") auto_refresh: bool = Field(True, description="Whether to automatically refresh the tool after initialization.") arguments: Optional[FunctionParams] = Field( - None, description="List of parameters with their details." 
+ None, description="List of parameters with their details.", exclude=True ) _callable: Optional[Callable] = PrivateAttr(default=None) @@ -27,6 +36,34 @@ def model_post_init(self, __context): if self.auto_refresh: self.refresh() + @staticmethod + def _wrap_function(func: Callable, name: str) -> Callable: + """ + Helper to wrap a function with event publishing logic to JustToolsBus. + """ + def __wrapper(*args, **kwargs): + bus = JustToolsBus() + bus.publish(f"{name}.call", args=args, kwargs=kwargs) + result = func(*args, **kwargs) + bus.publish(f"{name}.result", args=args, kwargs=kwargs, result=result) + return result + return __wrapper + + + @staticmethod + def _extract_parameters(func: Callable) -> List[Dict[str, Any]]: + """Extract parameters from the function's signature.""" + signature = inspect.signature(func) + parameters = [] + for name, param in signature.parameters.items(): + param_info = { + 'kind': str(param.kind), + 'default': str(param.default) if param.default != param.empty else None, + 'type_annotation': str(param.annotation) if param.annotation != param.empty else None + } + parameters.append({ name: param_info}) + return parameters + def get_litellm_description(self) -> Dict[str, Any]: dump = self.model_dump( mode='json', @@ -37,32 +74,24 @@ def get_litellm_description(self) -> Dict[str, Any]: ) return dump + @classmethod def from_callable(cls, input_function: Callable) -> 'JustTool': """Create a JustTool instance from a callable.""" package = input_function.__module__ litellm_description = function_to_dict(input_function) arguments = cls._extract_parameters(input_function) + + wrapped_callable = cls._wrap_function(input_function, litellm_description['function']) + return cls( **litellm_description, package=package, arguments=arguments, - _callable=input_function, + _callable=wrapped_callable, ) - @staticmethod - def _extract_parameters(func: Callable) -> List[Dict[str, Any]]: - """Extract parameters from the function's signature.""" - signature = inspect.signature(func) - parameters = [] - for name, param in signature.parameters.items(): - param_info = { - 'kind': str(param.kind), - 'default': str(param.default) if param.default != param.empty else None, - 'type_annotation': str(param.annotation) if param.annotation != param.empty else None - } - parameters.append({ name: param_info}) - return parameters + def refresh(self)->'JustTool': """ @@ -72,10 +101,8 @@ def refresh(self)->'JustTool': JustTool: Returns self to allow method chaining or direct appending. 
""" try: - # Import the module - package = importlib.import_module(self.package) # Get the function from the module - func = getattr(package, self.name) + func = getattr(import_module(self.package), self.name) # Update LiteLLM description litellm_description = LiteLLMDescription (**function_to_dict(func)) # Update the description @@ -83,12 +110,12 @@ def refresh(self)->'JustTool': # Update parameters self.parameters= litellm_description.parameters self.arguments = self._extract_parameters(func) - # Update the cached callable - self._callable = func + # Rewrap with the updated callable + self._callable = self._wrap_function(func, self.name) - return self # Return self to allow chaining or direct appending + return self except (ImportError, AttributeError) as e: - raise ImportError(f"Error refreshing {self.name} from {self.package}: {e}") + raise ImportError(f"Error refreshing {self.name} from {self.package}: {e}") from e def get_callable(self, refresh: bool = False) -> Callable: """ @@ -100,10 +127,9 @@ def get_callable(self, refresh: bool = False) -> Callable: if self._callable is not None: return self._callable try: - package = importlib.import_module(self.package) - func = getattr(package, self.name) - self._callable = func # Cache the callable - return func + func = getattr(import_module(self.package), self.name) + self._callable = self._wrap_function(func, self.name) + return self._callable except (ImportError, AttributeError) as e: raise ImportError(f"Error importing {self.name} from {self.package}: {e}") @@ -118,4 +144,3 @@ def __call__(self, *args, **kwargs): Union[JustTool, Callable] ] # A sequence (like a list or tuple) containing either JustTool instances or callable objects (functions). ] -# Although an internal dictionary representation is preferable, a list representation of tools can be handled and converted to a dictionary using validation. 
diff --git a/core/just_agents/llm_options.py b/core/just_agents/llm_options.py index e92d7b7..d99e0db 100644 --- a/core/just_agents/llm_options.py +++ b/core/just_agents/llm_options.py @@ -1,10 +1,43 @@ from typing import Any, Dict, List, Optional -from pydantic import Field, HttpUrl - -from just_agents.core.types import ModelOptions +from pydantic import Field, HttpUrl, BaseModel LLMOptions = Dict[str, Any] +class ModelOptions(BaseModel): + model: str = Field( + ..., + examples=["gpt-4o-mini"], + description="LLM model name" + ) + temperature: Optional[float] = Field( + 0.0, + ge=0.0, + le=2.0, + examples=[0.7], + description="Sampling temperature, values from 0.0 to 2.0" + ) + top_p: Optional[float] = Field( + 1.0, + ge=0.0, + le=1.0, + examples=[0.9], + description="Nucleus sampling probability, values from 0.0 to 1.0" + ) + presence_penalty: Optional[float] = Field( + 0.0, + ge=-2.0, + le=2.0, + examples=[0.6], + description="Presence penalty, values from -2.0 to 2.0" + ) + frequency_penalty: Optional[float] = Field( + 0.0, + ge=-2.0, + le=2.0, + examples=[0.5], + description="Frequency penalty, values from -2.0 to 2.0" + ) + class LLMOptionsBase(ModelOptions, extra="allow"): api_key: Optional[str] = Field(None, examples=["sk-proj-...."]) api_base : Optional[HttpUrl] = Field(default=None, @@ -16,12 +49,12 @@ class LLMOptionsBase(ModelOptions, extra="allow"): tool_choice : Optional[str] = None -ANTHROPIC_CLAUDE_3_5_SONNET: dict[str, Any] = { +ANTHROPIC_CLAUDE_3_5_SONNET: LLMOptions = { "model": "claude-3-5-sonnet-20240620", "temperature": 0.0 } -LLAMA3_2_VISION: dict[str, Any] = { +LLAMA3_2_VISION: LLMOptions = { #supports both text and vision "model": "groq/llama-3.2-90b-vision-preview", "api_base": "https://api.groq.com/openai/v1", @@ -44,75 +77,75 @@ class LLMOptionsBase(ModelOptions, extra="allow"): } -MISTRAL_8x22B = { +MISTRAL_8x22B: LLMOptions = { "model": "mistral/open-mixtral-8x22b", "temperature": 0.0 } -OPENAI_GPT4o: dict[str, Any] = { +OPENAI_GPT4o: LLMOptions = { "model": "gpt-4o", "temperature": 0.0 } -OPENAI_GPT4oMINI: dict[str, Any] = { +OPENAI_GPT4oMINI: LLMOptions = { "model": "gpt-4o-mini", "temperature": 0.0 } -OPENAI_O1_MINI: dict[str, Any] = { +OPENAI_O1_MINI: LLMOptions = { "model": "o1-mini", "temperature": 0.0 } -OPENAI_O1_PREVIEW: dict[str, Any] = { +OPENAI_O1_PREVIEW: LLMOptions = { "model": "o1-preview", "temperature": 0.0 } -PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: dict[str, Any]= { +PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: LLMOptions= { "model": "perplexity/llama-3.1-sonar-large-128k-online", "temperature": 0.0, "return_citations": True, "return_related_questions": True } -OPEN_ROUTER_Qwen_2_72B_Instruct: dict[str, Any] = { +OPEN_ROUTER_Qwen_2_72B_Instruct: LLMOptions = { "model": "openrouter/qwen/qwen-2-72b-instruct", "temperature": 0.0, "tools": [] } -OPEN_ROUTER_Qwen_2_72B_Instruct_Vision: dict[str, Any] = { +OPEN_ROUTER_Qwen_2_72B_Instruct_Vision: LLMOptions = { "model": "openrouter/qwen/qwen-2-vl-72b-instruct", "temperature": 0.0, "tools": [] } -OPEN_ROUTER_LLAMA_3_8B_FREE: dict[str, Any] = { +OPEN_ROUTER_LLAMA_3_8B_FREE: LLMOptions = { "model": "openrouter/meta-llama/llama-3-8b-instruct:free", "temperature": 0.0, "tools": [] } -OPEN_ROUTER_GEMINI_1_5_FLASH_EXP_FREE: dict[str, Any] = { +OPEN_ROUTER_GEMINI_1_5_FLASH_EXP_FREE: LLMOptions = { "model": "openrouter/google/gemini-flash-1.5-exp", "temperature": 0.0, "tools": [] } -DEEPSEEK_CODER: dict[str, Any] = { +DEEPSEEK_CODER: LLMOptions = { "model": "deepseek/deepseek-coder", "temperature": 
0.0, "tools": [] } -DEEPSEEK_CHAT: dict[str, Any] = { +DEEPSEEK_CHAT: LLMOptions = { "model": "deepseek/deepseek-chat", "temperature": 0.0, "tools": [] } -def local_vllm_model(model: str = "models/granite-7b-lab.Q4_K_M.gguf", host: str="http://localhost:8000") -> dict[str, Any]: +def local_vllm_model(model: str = "models/granite-7b-lab.Q4_K_M.gguf", host: str="http://localhost:8000") -> LLMOptions: return { "model": f"hosted_vllm/{model}", "temperature": 0.0, diff --git a/core/just_agents/patterns/chain_of_throught.py b/core/just_agents/patterns/chain_of_throught.py index 7d804b4..873240a 100644 --- a/core/just_agents/patterns/chain_of_throught.py +++ b/core/just_agents/patterns/chain_of_throught.py @@ -1,9 +1,9 @@ from typing import ClassVar, Literal, Any from just_agents.base_agent import BaseAgent -from pydantic import BaseModel, Field -from just_agents.core.types import Output, SupportedMessages -from just_agents.core.interfaces.IAgent import IAgent -from just_agents.patterns.interfaces.IThinkingAgent import IThinkingAgent, IThought, THOUGHT_TYPE +from pydantic import Field +from just_agents.types import SupportedMessages +from just_agents.patterns.interfaces.IThinkingAgent import IThinkingAgent, IThought + class Thought(IThought): """ @@ -28,8 +28,9 @@ class ChainOfThoughtAgent(BaseAgent, IThinkingAgent[SupportedMessages, Supported # 3. Consider limitations and alternative answers # 4. Use multiple methods to verify answers # 5. Format response as JSON with specific fields - DEFAULT_SYSTEM_PROMPT: ClassVar[str] = """ -You are an expert AI assistant that explains your reasoning step by step. + DEFAULT_COT_PROMPT: ClassVar[str] = """ You are an expert AI assistant that explains your reasoning step by step. + """ + RESPONSE_FORMAT: ClassVar[str] = """ For each step, provide a title that describes what you're doing in that step, along with the content. Decide if you need another step or if you're ready to give the final answer. Respond in JSON format with "title", "content", and "next_action" (either "continue" or "final_answer") keys. 
@@ -52,22 +53,29 @@ class ChainOfThoughtAgent(BaseAgent, IThinkingAgent[SupportedMessages, Supported } """ - - # Allow customization of the system prompt while maintaining the default as fallback system_prompt: str = Field( - DEFAULT_SYSTEM_PROMPT, + DEFAULT_COT_PROMPT, description="System prompt of the agent") - + + response_format: str = Field( + RESPONSE_FORMAT, + description="Response format of the agent") + + max_steps: int = Field(IThinkingAgent.MAX_STEPS, ge=1, description="Maximum number of reasoning steps") + append_response_format: bool = Field(True, + description="Whether to append default COT prompt of this agent to the provided") + + def model_post_init(self, __context: Any) -> None: + # Call parent class's post_init first (from JustAgentProfile) + super().model_post_init(__context) + if self.append_response_format: + system_prompt = self.system_prompt + "\n\n" + self.response_format + self.memory.clear_messages() + self.instruct(system_prompt) # don't modify self system prompt to avoid saving it into profile + def thought_query(self, query: SupportedMessages, **kwargs) -> Thought: # Parses the LLM response into a structured Thought object if self.supports_response_format and "gpt-4" in self.llm_options["model"]: # despite what they declare only openai does support reponse format right return self.query_structural(query, parser=Thought, response_format={"type": "json_object"}, **kwargs) # type: ignore else: return self.query_structural(query, parser=Thought, **kwargs) # type: ignore - - @classmethod - def with_prompt_prefix(cls, llm_options: dict, custom_prompt: str) -> "ChainOfThoughtAgent": - # Factory method (alternative constructor) to create an agent with a custom prompt prefix - # Preserves the default system prompt by appending it to the custom prompt - system_prompt=custom_prompt + "\n\n" + cls.DEFAULT_SYSTEM_PROMPT - return cls(llm_options=llm_options, system_prompt=system_prompt) # type: ignore diff --git a/core/just_agents/patterns/interfaces/IThinkingAgent.py b/core/just_agents/patterns/interfaces/IThinkingAgent.py index e82878d..4fc847e 100644 --- a/core/just_agents/patterns/interfaces/IThinkingAgent.py +++ b/core/just_agents/patterns/interfaces/IThinkingAgent.py @@ -1,36 +1,58 @@ -from just_agents.core.interfaces.IAgent import * +from just_agents.interfaces.agent import * +from typing import ClassVar, Any # New TypeVar for Thought THOUGHT_TYPE = TypeVar('THOUGHT_TYPE', bound='IThought') class IThought(BaseModel): + content: Any # + @abstractmethod def is_final(self) -> bool: raise NotImplementedError("You need to implement is_final() abstract method first!") +class ErrorThought(IThought): + def is_final(self) -> bool: + # Define logic to determine if this thought is final + return True # For error thoughts, consider them final class IThinkingAgent( IAgent[AbstractQueryInputType, AbstractQueryResponseType, AbstractStreamingChunkType], Generic[AbstractQueryInputType, AbstractQueryResponseType, AbstractStreamingChunkType, THOUGHT_TYPE] ): - + MAX_STEPS: ClassVar[int] = 8 + @abstractmethod def thought_query(self, response: AbstractQueryInputType, **kwargs) -> THOUGHT_TYPE: raise NotImplementedError("You need to implement thought_query abstract method first!") def think(self, query: AbstractQueryInputType, - max_iter: int = 8, + max_iter: Optional[int] = None, chain: Optional[list[THOUGHT_TYPE]] = None, **kwargs ) -> tuple[Optional[THOUGHT_TYPE], Optional[list[THOUGHT_TYPE]]]: """ This method will continue to query the agent until the final thought is not None or the 
max_iter is reached. Returns a tuple of (final_thought, thought_chain) """ - current_chain = chain or [] - thought = self.thought_query(query, **kwargs) #queries itself with thought as expected output - new_chain = [*current_chain, thought] #updates chain with the new thought - if thought.is_final() or max_iter <= 0: - return (thought, new_chain) #returns the final thought and the chain that preceded it - else: - return self.think(query, max_iter - 1, new_chain, **kwargs) #continues the thought process \ No newline at end of file + if not max_iter: + max_iter = IThinkingAgent.MAX_STEPS + if max_iter < 1: + return (None,None) + current_chain = list(chain) if chain else [] #shallow copy chain rather than modifying mutable instance + for step in range(max_iter): + try: + thought = self.thought_query(query, **kwargs) # queries itself with thought as expected output + except Exception as e: + return ( + ErrorThought(content=f"Error during thought_query at step {step + 1}: {e}"), + current_chain + ) + + current_chain.append(thought) # updates chain with the new thought + if thought.is_final() or step == max_iter-1: + return ( + thought, + current_chain + ) #returns the final thought and the chain that preceded it + diff --git a/core/just_agents/patterns/interfaces/ITypedAgent.py b/core/just_agents/patterns/interfaces/ITypedAgent.py index 0a3f698..a6224b8 100644 --- a/core/just_agents/patterns/interfaces/ITypedAgent.py +++ b/core/just_agents/patterns/interfaces/ITypedAgent.py @@ -1,4 +1,4 @@ -from just_agents.core.interfaces.IAgent import * +from just_agents.interfaces.agent import * class ITypedAgent( IAgent[AbstractQueryInputType, AbstractQueryResponseType, AbstractStreamingChunkType], diff --git a/core/just_agents/patterns/reflection_agent.py b/core/just_agents/patterns/reflection_agent.py index 6610ae1..78048ef 100644 --- a/core/just_agents/patterns/reflection_agent.py +++ b/core/just_agents/patterns/reflection_agent.py @@ -1,5 +1,5 @@ from typing import Union, Dict, Sequence, AsyncGenerator, Any -from just_agents.core.interfaces.IAgent import IAgent +from just_agents.interfaces.agent import IAgent ITERATIONS = "iterations" CRITIC_PROMPT = "critic_prompt" diff --git a/core/just_agents/core/__init__.py b/core/just_agents/protocols/__init__.py similarity index 100% rename from core/just_agents/core/__init__.py rename to core/just_agents/protocols/__init__.py diff --git a/core/just_agents/protocols/echo_protocol.py b/core/just_agents/protocols/echo_protocol.py new file mode 100644 index 0000000..333871d --- /dev/null +++ b/core/just_agents/protocols/echo_protocol.py @@ -0,0 +1,238 @@ +import random +from pydantic import BaseModel, Field, PrivateAttr +from typing import Any, List, Union, Dict, Sequence, Coroutine, AsyncGenerator, Generator, ClassVar, Type, Callable, Optional +from openai.types.chat.chat_completion import ChatCompletion, Choice, ChatCompletionMessage +from just_agents.interfaces.protocol_adapter import IProtocolAdapter, ExecuteToolCallback, AbstractMessage +from just_agents.interfaces.function_call import IFunctionCall, ToolByNameCallback +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol +from just_agents.protocols.openai_streaming import OpenaiStreamingProtocol + +class NoopFunctionCall(BaseModel, IFunctionCall[AbstractMessage]): + """ + A no-op function call implementation that performs no actual work. 
+ """ + id: str = Field("noop_id", description="Function call identifier.") + name: str = Field("noop_function", description="The name of the no-op function.") + arguments: Any = Field(None, description="Arguments for the no-op function.") + + def execute_function(self, call_by_name: ToolByNameCallback) -> AbstractMessage: + """ + Execute the no-op function call. + + Args: + call_by_name: A callback to retrieve a tool by name. + + Returns: + AbstractMessage: A simple tool message indicating no execution was done. + """ + return {"role": "tool", "content": "Noop function execution.", "name": self.name, "tool_call_id": self.id} + + @staticmethod + def reconstruct_tool_call_message(calls: Sequence['NoopFunctionCall']) -> AbstractMessage: + """ + Reconstruct a message from multiple no-op calls. + + Args: + calls: A sequence of no-op function calls. + + Returns: + AbstractMessage: A simple assistant message indicating no-op calls. + """ + return {"role": "assistant", "content": "Noop calls reconstructed."} + +# class ChatCompletionChoice(Choice): +# finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]] = None +# # text: Optional[str] = Field(default=None, alias="message.content") +# message : Optional[ResponseMessage] +# +# class ChatCompletionChoiceChunk(ChatCompletionChoice): +# delta: ResponseMessage = Field(default=None) +# message: Optional[ResponseMessage] = Field(default=None, exclude=True) #hax +# +class ChatCompletionUsage(CompletionUsage): + prompt_tokens: int = Field(default=0) + completion_tokens: int = Field(default=0) + total_tokens: int = Field(default=0) +# + +# class Context(BaseModel): +# mode : str +# context : Any +# +class ChatCompletionResponse(ChatCompletion): + id: str + object: Literal["chat.completion", "chat.completion.chunk"] + created: Union[int,float] + model: str + choices: List[ChatCompletionChoice] + usage: Optional[ChatCompletionUsage] = Field(default=None) + + + +class EchoModelResponse(BaseModel): + """ + A response model that simply echoes the input content. + """ + content: Union[str, Any] = Field(..., description="The echoed content.") + role: str = Field("assistant", description="The role associated with this response.") + metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional metadata for the response.") + +class StreamedEchoModelResponse(EchoModelResponse, populate_by_name=True): + """ + A streamed response model that includes a delta field. + 'content' is synchronized with 'delta' by using alias and populate_by_name=True. + """ + delta: str = Field(..., alias="content", description="Streamed delta content.") + role: str = Field("assistant", description="The role associated with this response.") + metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional metadata for the response.") + + +class EchoProtocolAdapter(BaseModel, IProtocolAdapter[EchoModelResponse, AbstractMessage]): + """ + An echo protocol adapter that returns the input as output, + serving as a stub for testing without actual model calls. + """ + function_convention: ClassVar[Type[IFunctionCall[Any]]] = NoopFunctionCall + execute_function_hook: ExecuteToolCallback[AbstractMessage] = Field(..., description="Callback to execute tool functions.") + _output_streaming: IAbstractStreamingProtocol = PrivateAttr(default_factory=OpenaiStreamingProtocol) + + def _completion(self, prompt: str) -> EchoModelResponse: + """ + Produce a complete EchoModelResponse. + + Args: + prompt: The input prompt. 
+ + Returns: + EchoModelResponse: The echoed prompt. + """ + return EchoModelResponse(content=prompt) + + def _streaming_completion(self, prompt: str) -> Generator[StreamedEchoModelResponse, None, None]: + """ + Produce a streaming response by splitting the prompt into random-sized chunks + and yielding them as StreamedEchoModelResponse. + + Args: + prompt: The input prompt. + + Yields: + StreamedEchoModelResponse: Each chunk of the prompt as a streamed delta. + """ + data = prompt + start = 0 + while start < len(data): + chunk_size = random.randint(2, 5) + chunk = data[start:start + chunk_size] + start += chunk_size + + # Use the streaming protocol to simulate a chunk + yield self._output_streaming.get_chunk(index=start, delta=chunk, options={"model": "echo-model"}) + + yield self._output_streaming.done() + + def completion(self, *args, **kwargs) -> Union[EchoModelResponse, Generator[StreamedEchoModelResponse, None, None]]: + """ + Return either a full EchoModelResponse or a streaming generator of StreamedEchoModelResponse + depending on 'streaming' flag in kwargs. + + Args: + *args: Additional positional arguments. + **kwargs: Additional keyword arguments, including 'prompt' and 'streaming'. + + Returns: + Union[EchoModelResponse, Generator[StreamedEchoModelResponse]]: + If 'streaming' is True, returns a generator for streaming chunks; + otherwise, returns a full EchoModelResponse. + """ + prompt = kwargs.get("prompt", "") + streaming = kwargs.get("streaming", False) + if streaming: + return self._streaming_completion(prompt) + else: + return self._completion(prompt) + + async def async_completion(self, *args, **kwargs) \ + -> Coroutine[Any, Any, Union[EchoModelResponse, AsyncGenerator[StreamedEchoModelResponse, None]]]: + """ + An async wrapper around completion. + + Args: + *args: Additional positional arguments. + **kwargs: Additional keyword arguments. + + Returns: + The same result as completion(), but awaited if necessary. + """ + # Wrap the synchronous completion in an async context + # Since completion might return either a generator or a single response, + # we just await in a trivial manner. If it's a generator, it's yielded upon iteration. + result = self.completion(*args, **kwargs) + return result + + def message_from_response(self, response: EchoModelResponse) -> AbstractMessage: + """ + Convert EchoModelResponse to an abstract message dict. + + Args: + response: The EchoModelResponse instance. + + Returns: + AbstractMessage: A dictionary with role, content, and metadata. + """ + return { + "role": response.role, + "content": response.content, + "metadata": response.metadata + } + + def message_from_delta(self, response: EchoModelResponse) -> AbstractMessage: + """ + Convert a delta EchoModelResponse to an abstract message. + + Args: + response: The delta EchoModelResponse. + + Returns: + AbstractMessage: The same abstract message structure as a full response. + """ + return self.message_from_response(response) + + def content_from_delta(self, delta: AbstractMessage) -> str: + """ + Extract textual content from a delta message. + + Args: + delta: The abstract message dict. + + Returns: + str: The content string from the delta. + """ + return delta.get("content", "") + + def tool_calls_from_message(self, message: AbstractMessage) -> List[IFunctionCall[AbstractMessage]]: + """ + Extract tool calls from the given message. This echo protocol does not produce tool calls. + + Args: + message: The abstract message dict. 
+ + Returns: + List[IFunctionCall]: An empty list as no tool calls are produced. + """ + return [] + + def response_from_deltas(self, deltas: List[EchoModelResponse]) -> EchoModelResponse: + """ + Combine all deltas into a single EchoModelResponse. + + Args: + deltas: A list of EchoModelResponse chunks. + + Returns: + EchoModelResponse: A single response with concatenated content. + """ + combined_content = "".join(delta.content for delta in deltas) + return EchoModelResponse(content=combined_content) + + # get_chunk and done are inherited from IProtocolAdapter and rely on _output_streaming diff --git a/core/just_agents/streaming/openai_protocol_adapter.py b/core/just_agents/protocols/litellm_protocol.py similarity index 56% rename from core/just_agents/streaming/openai_protocol_adapter.py rename to core/just_agents/protocols/litellm_protocol.py index bdc5f22..5de975f 100644 --- a/core/just_agents/streaming/openai_protocol_adapter.py +++ b/core/just_agents/protocols/litellm_protocol.py @@ -1,20 +1,41 @@ - import json from litellm import ModelResponse, CustomStreamWrapper, completion, acompletion, stream_chunk_builder -from typing import Optional, Callable, Union, Coroutine, ClassVar, Type, Sequence, List, Any, AsyncGenerator - -from pydantic import Field, AliasPath, PrivateAttr, BaseModel, Json, field_validator - -from just_agents.core.types import AbstractMessage - -from just_agents.streaming.protocols.interfaces.IFunctionCall import IFunctionCall, ToolByNameCallback -from just_agents.streaming.protocols.interfaces.IProtocolAdapter import IProtocolAdapter, ExecuteToolCallback -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol -from just_agents.streaming.protocols.openai_streaming import OpenaiStreamingProtocol - - -class OAIFunctionCall(BaseModel, IFunctionCall[AbstractMessage], extra="allow"): +from typing import Optional, Union, Coroutine, ClassVar, Type, Sequence, List, Any, AsyncGenerator +from pydantic import HttpUrl, Field, AliasPath, PrivateAttr, BaseModel, Json, field_validator + +from just_agents.types import MessageDict, Role + +from just_agents.interfaces.function_call import IFunctionCall, ToolByNameCallback +from just_agents.interfaces.protocol_adapter import IProtocolAdapter, ExecuteToolCallback +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol +from just_agents.protocols.openai_streaming import OpenaiStreamingProtocol + +#from openai.types import CompletionUsage +#from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam, ChatCompletionAssistantMessageParam, ChatCompletionToolMessageParam,ChatCompletionFunctionMessageParam +#from openai.types.chat.chat_completion import ChatCompletion, Choice, ChatCompletionMessage + +# Content types +class TextContent(BaseModel): + type: str = Field("text", examples=["text"]) + text: str = Field(..., examples=["What are in these images? 
Is there any difference between them?"]) + +class ImageContent(BaseModel): + type: str = Field("image_url", examples=["image_url"]) + image_url: HttpUrl = Field(..., examples=["https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"]) + +# Message class - Simple string content or a list of text or image content for vision model +class Message(BaseModel): + role: Role = Field(..., examples=[Role.assistant]) + content: Union[ + str, # Simple string content + List[Union[TextContent, ImageContent]] + ] = Field( + ..., + description="Content can be a simple string, or a list of content items including text or image URLs." + ) + +class LiteLLMFunctionCall(BaseModel, IFunctionCall[MessageDict], extra="allow"): id: str = Field(...) name: str = Field(..., validation_alias=AliasPath('function', 'name')) arguments: Json[dict] = Field(..., validation_alias=AliasPath('function', 'arguments')) @@ -39,7 +60,7 @@ def execute_function(self, call_by_name: ToolByNameCallback): return message @staticmethod - def reconstruct_tool_call_message(calls: Sequence['OAIFunctionCall']) -> dict: + def reconstruct_tool_call_message(calls: Sequence['LiteLLMFunctionCall']) -> dict: tool_calls = [] for call_params in calls: tool_calls.append({"type": "function", @@ -47,12 +68,12 @@ def reconstruct_tool_call_message(calls: Sequence['OAIFunctionCall']) -> dict: return {"role": "assistant", "content": None, "tool_calls": tool_calls} -class OAIAdapter(BaseModel, IProtocolAdapter[ModelResponse,AbstractMessage]): +class LiteLLMAdapter(BaseModel, IProtocolAdapter[ModelResponse,MessageDict]): #Class that describes function convention - function_convention: ClassVar[Type[IFunctionCall[AbstractMessage]]] = OAIFunctionCall + function_convention: ClassVar[Type[IFunctionCall[MessageDict]]] = LiteLLMFunctionCall #hooks to agent class - execute_function_hook: ExecuteToolCallback[AbstractMessage] = Field(...) - _output_streaming: AbstractStreamingProtocol = PrivateAttr(default_factory=OpenaiStreamingProtocol) + execute_function_hook: ExecuteToolCallback[MessageDict] = Field(...) 
+ _output_streaming: IAbstractStreamingProtocol = PrivateAttr(default_factory=OpenaiStreamingProtocol) def model_post_init(self, __context: Any) -> None: super().model_post_init(__context) @@ -64,7 +85,7 @@ async def async_completion(self, *args, **kwargs) \ -> Coroutine[Any, Any, Union[ModelResponse, CustomStreamWrapper, AsyncGenerator]]: return acompletion(*args, **kwargs) - def message_from_response(self, response: ModelResponse) -> AbstractMessage: + def message_from_response(self, response: ModelResponse) -> MessageDict: message = response.choices[0].message.model_dump( mode="json", exclude_none=True, @@ -77,7 +98,7 @@ def message_from_response(self, response: ModelResponse) -> AbstractMessage: assert "function_call" not in message return message - def message_from_delta(self, response: ModelResponse) -> AbstractMessage: + def message_from_delta(self, response: ModelResponse) -> MessageDict: message = response.choices[0].delta.model_dump( mode="json", exclude_none=True, @@ -88,10 +109,10 @@ def message_from_delta(self, response: ModelResponse) -> AbstractMessage: assert "function_call" not in message return message - def content_from_delta(self, delta: AbstractMessage) -> str: + def content_from_delta(self, delta: MessageDict) -> str: return delta.get("content") - def tool_calls_from_message(self, message: AbstractMessage) -> List[OAIFunctionCall]: + def tool_calls_from_message(self, message: MessageDict) -> List[LiteLLMFunctionCall]: # If there are no tool calls or tools available, exit the loop tool_calls = message.get("tool_calls") if not tool_calls: @@ -99,7 +120,7 @@ def tool_calls_from_message(self, message: AbstractMessage) -> List[OAIFunctionC else: # Auto-convert each item in tool_calls to a FunctionCall instance with validation return [ - OAIFunctionCall(**tool_call) + LiteLLMFunctionCall(**tool_call) for tool_call in tool_calls ] diff --git a/core/just_agents/streaming/protocols/openai_streaming.py b/core/just_agents/protocols/openai_streaming.py similarity index 61% rename from core/just_agents/streaming/protocols/openai_streaming.py rename to core/just_agents/protocols/openai_streaming.py index d6267f7..aaed24f 100644 --- a/core/just_agents/streaming/protocols/openai_streaming.py +++ b/core/just_agents/protocols/openai_streaming.py @@ -1,10 +1,8 @@ -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol import json import time - -class OpenaiStreamingProtocol(AbstractStreamingProtocol): - +class OpenaiStreamingProtocol(IAbstractStreamingProtocol): def get_chunk(self, index: int, delta: str, options: dict): chunk = { "id": index, @@ -17,4 +15,5 @@ def get_chunk(self, index: int, delta: str, options: dict): def done(self): + # https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#event_stream_format return "data: [DONE]\n\n" \ No newline at end of file diff --git a/core/just_agents/streaming/protocol_factory.py b/core/just_agents/protocols/protocol_factory.py similarity index 64% rename from core/just_agents/streaming/protocol_factory.py rename to core/just_agents/protocols/protocol_factory.py index de385d7..f79f5c9 100644 --- a/core/just_agents/streaming/protocol_factory.py +++ b/core/just_agents/protocols/protocol_factory.py @@ -1,8 +1,9 @@ from enum import Enum -from just_agents.streaming.protocols.interfaces.IProtocolAdapter import IProtocolAdapter, ExecuteToolCallback +from just_agents.interfaces.protocol_adapter 
import IProtocolAdapter, ExecuteToolCallback class StreamingMode(str, Enum): openai = "openai" + echo = "echo" qwen2 = "qwen2" def __new__(cls, value, *args, **kwargs): @@ -21,8 +22,13 @@ def get_protocol_adapter( ) -> IProtocolAdapter: if mode == StreamingMode.openai: - from just_agents.streaming.openai_protocol_adapter import OAIAdapter - return OAIAdapter( + from just_agents.protocols.litellm_protocol import LiteLLMAdapter + return LiteLLMAdapter( + execute_function_hook=execute_functions, + ) + elif mode == StreamingMode.echo: + from just_agents.protocols.mock_protocol import EchoProtocolAdapter + return EchoProtocolAdapter( execute_function_hook=execute_functions, ) elif mode == StreamingMode.qwen2: diff --git a/core/just_agents/core/rotate_keys.py b/core/just_agents/rotate_keys.py similarity index 100% rename from core/just_agents/core/rotate_keys.py rename to core/just_agents/rotate_keys.py diff --git a/core/just_agents/simple/cot_agent.py b/core/just_agents/simple/cot_agent.py index 70d5045..d8709cf 100644 --- a/core/just_agents/simple/cot_agent.py +++ b/core/just_agents/simple/cot_agent.py @@ -1,8 +1,8 @@ -from just_agents.core.interfaces.IAgent import IAgent +from just_agents.interfaces.agent import IAgent from just_agents.simple.llm_session import LLMSession import json -from just_agents.streaming.protocols.openai_streaming import OpenaiStreamingProtocol -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol +from just_agents.protocols.openai_streaming import OpenaiStreamingProtocol +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol from pathlib import Path from just_agents.simple.utils import resolve_and_validate_agent_schema, resolve_llm_options, resolve_system_prompt, resolve_tools @@ -20,7 +20,7 @@ class ChainOfThoughtAgent(IAgent): def __init__(self, llm_options: dict = None, agent_schema: str | Path | dict | None = None, - tools: list = None, output_streaming:AbstractStreamingProtocol = OpenaiStreamingProtocol()): + tools: list = None, output_streaming:IAbstractStreamingProtocol = OpenaiStreamingProtocol()): self.agent_schema: dict = resolve_and_validate_agent_schema(agent_schema, "cot_agent_prompt.yaml") if tools is None: tools = resolve_tools(self.agent_schema) @@ -28,7 +28,7 @@ def __init__(self, llm_options: dict = None, agent_schema: str | Path | dict | N system_prompt=resolve_system_prompt(self.agent_schema), agent_schema=self.agent_schema.get(LLM_SESSION, None), tools=tools) - self.output_streaming: AbstractStreamingProtocol = output_streaming + self.output_streaming: IAbstractStreamingProtocol = output_streaming def _process_initial_query(self, input: str | dict | list) -> tuple[dict, str]: diff --git a/core/just_agents/simple/llm_session.py b/core/just_agents/simple/llm_session.py index 24f4647..f0a07d6 100644 --- a/core/just_agents/simple/llm_session.py +++ b/core/just_agents/simple/llm_session.py @@ -6,13 +6,13 @@ import litellm from litellm import ModelResponse, completion from litellm.utils import Choices -from just_agents.core.interfaces.IAgent import IAgent +from just_agents.interfaces.agent import IAgent from just_agents.simple.memory import Memory from typing import Callable, Optional -from just_agents.streaming.abstract_streaming import AbstractStreaming -from just_agents.streaming.openai_streaming import AsyncSession +from just_agents.simple.streaming import AbstractStreaming +from just_agents.simple.streaming.openai_streaming import AsyncSession from just_agents.simple.utils import 
resolve_and_validate_agent_schema, resolve_llm_options, resolve_system_prompt, resolve_tools -from just_agents.core.rotate_keys import RotateKeys +from just_agents.rotate_keys import RotateKeys OnCompletion = Callable[[ModelResponse], None] @@ -70,10 +70,10 @@ def __init__(self, llm_options: Optional[dict[str, Any]] = None, if streaming_method is None or streaming_method == OPENAI: self.streaming = AsyncSession(self) elif streaming_method.lower() == QWEN2: - from just_agents.streaming.qwen2_streaming import Qwen2AsyncSession + from just_agents.simple.streaming import Qwen2AsyncSession self.streaming = Qwen2AsyncSession(self) elif streaming_method.lower() == CHAIN_OF_THOUGHT: - from just_agents.streaming.chain_of_thought import ChainOfThought + from just_agents.simple.streaming.chain_of_thought import ChainOfThought self.streaming = ChainOfThought(self) else: raise ValueError("just_streaming_method is incorrect. " diff --git a/core/just_agents/core/interfaces/__init__.py b/core/just_agents/simple/streaming/__init__.py similarity index 100% rename from core/just_agents/core/interfaces/__init__.py rename to core/just_agents/simple/streaming/__init__.py diff --git a/core/just_agents/streaming/chain_of_thought.py b/core/just_agents/simple/streaming/chain_of_thought.py similarity index 77% rename from core/just_agents/streaming/chain_of_thought.py rename to core/just_agents/simple/streaming/chain_of_thought.py index 88b239d..0603535 100644 --- a/core/just_agents/streaming/chain_of_thought.py +++ b/core/just_agents/simple/streaming/chain_of_thought.py @@ -1,19 +1,13 @@ from typing import AsyncGenerator -from litellm import ModelResponse, completion -from typing import Callable, Optional - -from just_agents.simple.memory import Memory -from just_agents.streaming.abstract_streaming import AbstractStreaming, FunctionParser -from just_agents.streaming.protocols.openai_streaming import OpenaiStreamingProtocol -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol -import time -import litellm +from just_agents.simple.streaming.protocols.abstract_streaming import AbstractStreaming +from just_agents.protocols.openai_streaming import OpenaiStreamingProtocol +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol import json class ChainOfThought(AbstractStreaming): - def __init__(self, llm_session, output_streaming: AbstractStreamingProtocol = OpenaiStreamingProtocol()): + def __init__(self, llm_session, output_streaming: IAbstractStreamingProtocol = OpenaiStreamingProtocol()): super().__init__(llm_session) self.output_streaming = output_streaming diff --git a/core/just_agents/streaming/openai_streaming.py b/core/just_agents/simple/streaming/openai_streaming.py similarity index 79% rename from core/just_agents/streaming/openai_streaming.py rename to core/just_agents/simple/streaming/openai_streaming.py index 7a8667d..cdff443 100644 --- a/core/just_agents/streaming/openai_streaming.py +++ b/core/just_agents/simple/streaming/openai_streaming.py @@ -1,17 +1,16 @@ from typing import AsyncGenerator -from litellm import ModelResponse, completion -from typing import Callable, Optional +from litellm import ModelResponse +from typing import Optional -from just_agents.simple.memory import Memory -from just_agents.streaming.abstract_streaming import AbstractStreaming, FunctionParser -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol -from just_agents.streaming.protocols.openai_streaming import OpenaiStreamingProtocol 
+from just_agents.simple.streaming.protocols.abstract_streaming import AbstractStreaming, FunctionParser +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol +from just_agents.protocols.openai_streaming import OpenaiStreamingProtocol class AsyncSession(AbstractStreaming): - def __init__(self, llm_session, output_streaming: AbstractStreamingProtocol = OpenaiStreamingProtocol()): + def __init__(self, llm_session, output_streaming: IAbstractStreamingProtocol = OpenaiStreamingProtocol()): super().__init__(llm_session) self.output_streaming = output_streaming diff --git a/core/just_agents/streaming/__init__.py b/core/just_agents/simple/streaming/protocols/__init__.py similarity index 100% rename from core/just_agents/streaming/__init__.py rename to core/just_agents/simple/streaming/protocols/__init__.py diff --git a/core/just_agents/streaming/abstract_streaming.py b/core/just_agents/simple/streaming/protocols/abstract_streaming.py similarity index 88% rename from core/just_agents/streaming/abstract_streaming.py rename to core/just_agents/simple/streaming/protocols/abstract_streaming.py index 37abe86..5ca980d 100644 --- a/core/just_agents/streaming/abstract_streaming.py +++ b/core/just_agents/simple/streaming/protocols/abstract_streaming.py @@ -1,11 +1,8 @@ import json -import time from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Dict, Callable, AsyncGenerator, Optional -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol - -from just_agents.simple.memory import Memory +from typing import Dict, Callable, AsyncGenerator +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol @dataclass @@ -28,7 +25,7 @@ class AbstractStreaming(ABC): """ Class that is required to implement the streaming logic """ - output_streaming: AbstractStreamingProtocol + output_streaming: IAbstractStreamingProtocol def __init__(self, llm_session): self.session = llm_session diff --git a/core/just_agents/streaming/qwen2_streaming.py b/core/just_agents/simple/streaming/qwen2_streaming.py similarity index 84% rename from core/just_agents/streaming/qwen2_streaming.py rename to core/just_agents/simple/streaming/qwen2_streaming.py index 863d970..6728bd5 100644 --- a/core/just_agents/streaming/qwen2_streaming.py +++ b/core/just_agents/simple/streaming/qwen2_streaming.py @@ -1,11 +1,9 @@ from typing import AsyncGenerator -from litellm import ModelResponse, completion -from typing import Callable, Optional +from typing import Callable -from just_agents.simple.memory import Memory -from just_agents.streaming.abstract_streaming import AbstractStreaming, FunctionParser -from just_agents.streaming.protocols.abstract_protocol import AbstractStreamingProtocol -from just_agents.streaming.protocols.openai_streaming import OpenaiStreamingProtocol +from just_agents.simple.streaming.protocols.abstract_streaming import AbstractStreaming +from just_agents.interfaces.streaming_protocol import IAbstractStreamingProtocol +from just_agents.protocols.openai_streaming import OpenaiStreamingProtocol import json from qwen_agent.llm import get_chat_model # deprecated, so far we do not use qwen_agent import litellm @@ -13,7 +11,7 @@ class Qwen2AsyncSession(AbstractStreaming): - def __init__(self, llm_session, output_streaming: AbstractStreamingProtocol = OpenaiStreamingProtocol()): + def __init__(self, llm_session, output_streaming: IAbstractStreamingProtocol = OpenaiStreamingProtocol()): super().__init__(llm_session) 
self.output_streaming = output_streaming diff --git a/core/just_agents/streaming/protocols/__init__.py b/core/just_agents/streaming/protocols/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/core/just_agents/types.py b/core/just_agents/types.py new file mode 100644 index 0000000..21ae549 --- /dev/null +++ b/core/just_agents/types.py @@ -0,0 +1,26 @@ +from enum import Enum +from typing import TypeVar, Any, List, Union, Dict + +######### Common ########### +MessageDict = Dict[str, Any] +MessageDictOrStr = Union[str, MessageDict] +SupportedMessages = Union[MessageDictOrStr, List[MessageDictOrStr]] +Output = TypeVar('Output') + +class Role(str, Enum): + system = "system" + user = "user" + assistant = "assistant" + tool = "tool" + # make it similar to Literal["system", "user", "assistant", tool] while retaining enum convenience + + def __new__(cls, value, *args, **kwargs): + obj = str.__new__(cls, value) + obj._value_ = value + return obj + + def __str__(self): + return str(self.value) + + + diff --git a/examples/just_agents/examples/coding/bioinformatic_collab.py b/examples/just_agents/examples/coding/bioinformatic_collab.py new file mode 100644 index 0000000..b53ccc7 --- /dev/null +++ b/examples/just_agents/examples/coding/bioinformatic_collab.py @@ -0,0 +1,52 @@ +from dotenv import load_dotenv +from pathlib import Path +from just_agents.examples.coding.cot_dev import ChainOfThoughtDevAgent + +#from just_agents.simple.utils import build_agent + +load_dotenv(override=True) + +""" +This example shows how to use a Chain Of Thought code agent to run python code and bash commands. +It uses volumes (see tools.py) and is based on Chain Of Thought Agent class. +Note: current example is a work in progress and the task is too complex to get it solved in one go. 
+ + +WARNING: This example is not working as expected, some of GSE-s are messed up +""" + +if __name__ == "__main__": + current_dir = Path(__file__).parent.absolute() + + #test_agent = ChainOfThoughtDevAgent(llm_options=OPENAI_GPT4o) + #test_agent.save_to_yaml(file_path=current_dir/"agent_profiles.yaml", exclude_unset=False, exclude_defaults=False) + + bio_coder : ChainOfThoughtDevAgent= ChainOfThoughtDevAgent.convert_from_legacy( + Path(current_dir/"bioinformatic_dev_agent.yaml"), + Path(current_dir/"cot_dev_agent_profiles.yaml"), + ChainOfThoughtDevAgent, + "bioinformatic_cot_agent", + ) + + dev_ops : ChainOfThoughtDevAgent= ChainOfThoughtDevAgent.convert_from_legacy( + Path(current_dir / "devops_agent.yaml"), + Path(current_dir / "cot_dev_agent_profiles.yaml"), + ChainOfThoughtDevAgent, + "devops_cot_agent", + ) + + query_strong = "Take two nutritional datasets (GSE176043 and GSE41781) and three partial reprogramming datasets (GSE148911, GSE190986 and GSE144600), download them from GEO and generate PCA plot with them in /output folder" + testrun=5 + + #for _ in range(testrun): + result, cot = bio_coder.think(query_strong) + bio_coder.thoughts.save_to_yaml("bio_coder_thoughts",file_path=Path(current_dir / "thoughts.yaml"), exclude_unset=False, exclude_defaults=False) + + #agent = build_agent(coding_examples_dir / "bioinformatic_agent.yaml") + #query_GSE137317 = "Download gene counts from GSE137317, split them by conditions, make PCA plot and differential expression analysis using only python libraries" + #query_GSE144600 = "Download gene counts from GSE144600" + #query_two = "add GSE137317 and GSE144600 to the same PCA plot" + + #query = "Take two nutritional datasets (GSE176043 and GSE41781) and three partial reprogramming datasets (GSE148911, GSE190986 and GSE144600), download them from GEO and generate PCA plot with them in /output folder" + #result, thoughts = agent.query(query_GSE137317) + diff --git a/examples/just_agents/examples/coding/bioinformatic_dev_agent.yaml b/examples/just_agents/examples/coding/bioinformatic_dev_agent.yaml new file mode 100644 index 0000000..af60f58 --- /dev/null +++ b/examples/just_agents/examples/coding/bioinformatic_dev_agent.yaml @@ -0,0 +1,92 @@ +class: "ChainOfThoughtAgent" +system_prompt: | + You are a bioinformatician AI assistant. + Your role is to help with bioinformatics tasks and generate plans or code as needed. + Please adhere to the following guidelines strictly: + 1. Always maintain your role as a bioinformatician. + 2. Note that you work in a team of agents, therefore you may see user inputs, coding plans, analysis, code outputs and such. + Focus on reasoning, analysis, and coding tasks. + 3. Explicitly specify what you want of the environment to have for this to be taken care of. + Your starting environment have the following from the start: + - python=3.11 + - requests + - biopython + - scanpy<=1.10.3 + - scikit-learn<=1.5.2 + - polars>=1.11.0 + - pandas>=2.2.2 + - numpy<2.0.0,>=1.23 + - scipy<=1.14.1 + - pyarrow + - pip: + - genomepy>=0.16.1 + - pyensembl + - plotly + - GEOparse>=2.0.4 + - pybiomart + - scanpy + 4. Use information provided in the input to write detailed plans, python code or bash code to accomplish the given goal or task. + 5. Divide and conquer: If the user query is complex or include multiple components or tasks to it, compose a plan to have a modular structure, + where self-contained intermediary solutions for a single module can then be easily re-used without re-visiting the completed steps. + 6. 
Go for an MVP solution first and foremost. Strive to achieve at least minimal result and go forward first, before going wide and rich. + If the user query is complex by nature, include multiple components to it, rich in detail, if's, and when's: + - always start with stripping it to the core + - lay a path to the minimal sensible result in your plan. + - Follow the plan, if a hard obstacle or failure is met with one of the details, note it, but try going around first when possible. + 7. Once you have the code that is a candidate for the MVP, validate it and convey that as your final answer without delay. + 8. The code shall be executed once you are provided with the outputs, revisit the user query, the goals you set and assess whether MVP is reached. + 8. Once MVP is reached: + - Adjust the plan to gradually grow the MVP. + - Get back to the problems you met on the way and postponed, try to solve the easiest ones first. + - Iteratively extend and grow MVP, assembling the details and components you stripped during initial decomposition, in the reverse order, eventually fulfilling the query. + 7. If your code downloads data, save it in the /input directory. Also, always check if the data is already in the /input directory to avoid unnecessary downloads. + 8. When writing code: + - always generate the full code of the script with all required imports. Each time you run the code assume nothing is imported or initialized. + - Use full absolute paths for all files. Use pathlib when possible. + - Use default values for unspecified parameters. + - Only use software directly installed with micromamba or pip or present in the initial environment.yaml. + - If the method that you use require data preprocessing (like NaN deletion) or normalization, do it first. + - Always inspect the data, check which columns in the dataframes are relevant and clean them from bad or missing entries if necessary + - If your previous run failed because some field does not exist, inspect the fields and check if you confused the names + - Do not repeat steps already successfully completed in the history. + - If you download data, save it in the /input directory. Also, always check if the data is already in the /input directory to avoid unnecessary downloads. + - If you create files and folders with results save them inside /output directory unless other is specified explicitly. + - When you make plots save figures in /output directory. + - For outputs, use meaningful numbered attempts naming to avoid cases when good output from previous attempt was lost due to newly introduced bug. + - If you encounter errors related to field names in Python objects, use the dir() or similar functions to inspect the object and verify the correct field names. For example: print(dir(object_name)) + Compare the output with the field names you're trying to access. Correct any mismatches in your code. + Give all relevant imports at the beginning of the code. Do not assume anything imported in the global scope. + + 9. Pay attention to the number of input files and do not miss any. + 10. Be aware of file name changes or outputs from previous steps when provided with history. + 11. If you need to know facts of the environment the code operate in, communicate that in clear and instructive manner, examples: + - I need to know the listing /output/plots + - I need to have a look at the filesize, header and lines 1,2,3 and 536 of /input/some_badly_broken.csv + 12. Validate your code using provided validate_python_code_syntax before submitting. + + 13. 
If execution errors occur, fix the code based on the error information provided. + 14. When you are ready to give the final answer, explain the results obtained and files and folders created in the /output (if any). + 15. Examples of using GEOparse to download and process GEO data: + ```python + import GEOparse + + gse_id = 'GSE176043' + gse = GEOparse.get_GEO(geo=gse_id, destdir='./input', silent=True) + ``` + System constraints: + - You are working on an Ubuntu 24.04 system. + - You have a micromamba environment named 'base'. + - No other software is installed by default. + Remember to adapt your response based on whether you're creating an initial plan or writing code for a specific task. + Your goal is to provide accurate, efficient, and executable bioinformatics solutions. + +thought_max_tokes: 5000 +max_steps: 10 +final_max_tokens: 2500 +tools: + - package: "just_agents.examples.coding.tools" + function: "submit_code" +options: + model: "gpt-4o-mini" + temperature: 0.0 + api_base: "http://127.0.0.1:14000/v1" \ No newline at end of file diff --git a/examples/just_agents/examples/coding/chain_of_though_coding_agent.py b/examples/just_agents/examples/coding/chain_of_though_coding_agent.py index 19dbbf6..a440087 100644 --- a/examples/just_agents/examples/coding/chain_of_though_coding_agent.py +++ b/examples/just_agents/examples/coding/chain_of_though_coding_agent.py @@ -20,7 +20,7 @@ print("RESULT+++++++++++++++++++++++++++++++++++++++++++++++") pprint.pprint(result) pprint.pprint(thoought) - #agent = ChainOfThoughtAgent.with_prompt_prefix(llm_options=options, custom_prompt=prompt) + #agent.save_to_yaml("Bioinformatician", file_path=config_path) #agent: BaseAgent = BaseAgent.from_yaml("Bioinformatician", file_path=config_path) #result = agent.query("Get FGF2 human protein sequence from uniprot using biopython. As a result, return only the sequence") diff --git a/examples/just_agents/examples/coding/code_agent_genomics.py b/examples/just_agents/examples/coding/code_agent_genomics.py index 33b2acd..f088c8f 100644 --- a/examples/just_agents/examples/coding/code_agent_genomics.py +++ b/examples/just_agents/examples/coding/code_agent_genomics.py @@ -1,10 +1,9 @@ from dotenv import load_dotenv -from just_agents.core.interfaces.IAgent import IAgent from just_agents.simple.utils import build_agent from just_agents.simple.cot_agent import ChainOfThoughtAgent from just_agents.examples.coding.tools import write_thoughts_and_results -from just_agents.examples.coding.mounts import input_dir, output_dir, coding_examples_dir +from just_agents.examples.coding.mounts import coding_examples_dir load_dotenv(override=True) diff --git a/examples/just_agents/examples/coding/cot_dev.py b/examples/just_agents/examples/coding/cot_dev.py new file mode 100644 index 0000000..1b7da6f --- /dev/null +++ b/examples/just_agents/examples/coding/cot_dev.py @@ -0,0 +1,178 @@ +from typing import ClassVar, Optional, Any, Dict +from just_agents.base_agent import BaseAgent +from pydantic import Field, PrivateAttr +from just_agents.just_tool import JustToolsBus + +from cot_memory import ActionableThought, IBaseThoughtMemory, BaseThoughtMemory +from just_agents.types import SupportedMessages +from just_agents.patterns.interfaces.IThinkingAgent import IThinkingAgent + + + + +class ChainOfThoughtDevAgent(BaseAgent, IThinkingAgent[SupportedMessages, SupportedMessages, SupportedMessages, ActionableThought]): + """ + Agen uses default prompt that instructs the agent to: + 1. Explain reasoning step by step + 2. 
Use at least 3 steps + 3. Consider limitations and alternative answers + 4. Use multiple methods to verify answers + 5. Format response as JSON with specific fields + + This prompt may be appended after the other custom prompt to introduce COT pattern + """ + RESPONSE_FORMAT: ClassVar[str] = """ +RESPONSE FORMAT: + +Your input may contain 'final_answer' entries, consider these answers of other agents. +For each step, provide a title that describes what you're doing in that step, along with the content. +Decide if you need another step or if you're ready to give the final answer. +Respond in JSON format with 'title', 'content', 'code', 'console', and 'next_action' (either 'continue' or 'final_answer') keys. +Make sure you send only one JSON step object. You response should be a valid JSON object. In the JSON use Use Triple Quotes for Multi-line Strings. + +USE AS MANY REASONING STEPS AS POSSIBLE. AT LEAST 3. +BE AWARE OF YOUR LIMITATIONS AS AN LLM AND WHAT YOU CAN AND CANNOT DO. +IN YOUR REASONING, INCLUDE EXPLORATION OF ALTERNATIVE ANSWERS. +CONSIDER YOU MAY BE WRONG, AND IF YOU ARE WRONG IN YOUR REASONING, WHERE IT WOULD BE. +FULLY TEST ALL OTHER POSSIBILITIES. +YOU CAN BE WRONG. WHEN YOU SAY YOU ARE RE-EXAMINING, ACTUALLY RE-EXAMINE, AND USE ANOTHER APPROACH TO DO SO. +DO NOT JUST SAY YOU ARE RE-EXAMINING. USE AT LEAST 3 METHODS TO DERIVE THE ANSWER. USE BEST PRACTICES. + +Example 1 of a valid JSON response: +```json +{ + "title": "Identifying Key Information", + "content": "To begin solving this problem, we need to carefully examine the given information and identify the crucial elements that will guide our solution process. This involves...", + "next_action": "continue" +}``` +Example 2 of a valid JSON response: +```json +{ + "title": "Code to solve the problem", + "content": "This code is expected to ... As a result the following should be produced: ...", + "code": "\"\" + import numpy as np + ... + \"\"", + "next_action": "final_answer" +}``` +Example 3 of a valid JSON response: +```json +{ + "title": "Code execution observations", + "content": "Code execution failed during ... , root cause of the problem likely is ..." + "code": " " + "console": "\"\" + Traceback (most recent call last): + \"\"", + "next_action": "final_answer" +}``` +Example 1 of INVALID response including multiple JSON objects instead of one, DO NOT do that: +```json +{ + "title": "Some thinking", + "content": "...", + "next_action": "continue" +} +{ + "title": "Final thought!", + "content": "I got an answer already", + "next_action": "final_answer" +} +Example 2 of INVALID response including multiple JSON objects instead of one, DO NOT do that: +```json +{ + "title": "Some thinking", + "content": "...", + "next_action": "continue" +} +{ + "title": "Some more thinking in same step", + "content": "...", + "next_action": "continue" +} +``` + +""" + + # Allow customization of the system prompt while maintaining the default as fallback + DEFAULT_COT_PROMPT: ClassVar[str] = """ You are an expert AI assistant that explains your reasoning step by step. 
+ """ + + CODE_OK: ClassVar[str] = "Code syntax is correct" + + system_prompt: str = Field( + DEFAULT_COT_PROMPT, + description="System prompt of the agent") + + response_format: str = Field( + RESPONSE_FORMAT, + description="System prompt of the agent") + + thoughts: IBaseThoughtMemory = Field(default_factory= BaseThoughtMemory, exclude=True, description="Memory of thought chains") + + max_steps: int = Field(IThinkingAgent.MAX_STEPS, ge=1, description="Maximum number of reasoning steps") + append_response_format: bool = Field(True, description="Whether to append default COT prompt of this agent to the provided") + + _event_bus : JustToolsBus = PrivateAttr(default_factory= JustToolsBus) + _code_buffer: Dict[str,str] = PrivateAttr(default_factory=dict) + _console_buffer: str = PrivateAttr("") + + def model_post_init(self, __context: Any) -> None: + # Call parent class's post_init first (from JustAgentProfile) + super().model_post_init(__context) + if self.append_response_format: + system_prompt = self.system_prompt + "\n\n" + self.response_format + self.memory.clear_messages() + self.instruct(system_prompt) # don't modify self system prompt to avoid saving it into profile + + # Subscribe handlers to events + self.event_bus.subscribe("submit_code.call", self.handle_submit_code) + self.event_bus.subscribe("submit_console_output.call", self.handle_submit_console_output) + + + def thought_query(self, query: SupportedMessages, **kwargs) -> ActionableThought: + # Parses the LLM response into a structured ActionableThought object + if self.supports_response_format and "gpt-4" in self.llm_options["model"]: # despite what they declare only openai does support reponse format right + return self.query_structural(query, parser=ActionableThought, response_format={"type": "json_object"}, **kwargs) + else: + return self.query_structural(query, parser=ActionableThought, **kwargs) + + def handle_submit_code(self, code: str, filename: str, result: str) -> None: + if result == self.CODE_OK: + # Prepare JSON with proper escaping + self._code_buffer[filename] = code + # escaped_json = json.dumps(output, indent=4) + + def handle_submit_console_output(self, output: str, append: bool) -> None: + previous = "" + if append: + previous = self._console_buffer + self._console_buffer = previous + '\n' + output + + def think( + self, + query: SupportedMessages, + max_iter: Optional[int] = None, + chain: Optional[list[ActionableThought]] = None, + **kwargs + ) -> tuple[Optional[ActionableThought], Optional[list[ActionableThought]]]: + if not max_iter: + max_iter = self.max_steps + final_result : Optional[ActionableThought] = None + current_chain : Optional[list[ActionableThought]] = None + (final_result , current_chain) = super().think(query,max_iter=max_iter,chain=None,**kwargs) + for thought in current_chain: + thought.agent = self.shortname + if final_result and isinstance(final_result, ActionableThought): + final_result.agent = self.shortname + + self.thoughts.add_messages([current_chain,final_result]) #remember individual thougths + return ( + final_result, + [*(chain or []), current_chain] + ) + + + + diff --git a/examples/just_agents/examples/coding/cot_dev_agent_profiles.yaml b/examples/just_agents/examples/coding/cot_dev_agent_profiles.yaml new file mode 100644 index 0000000..c139782 --- /dev/null +++ b/examples/just_agents/examples/coding/cot_dev_agent_profiles.yaml @@ -0,0 +1,381 @@ +agent_profiles: + bioinformatic_cot_agent: + append_response_format: true + class: ChainOfThoughtAgent + class_qualname: 
cot_dev.ChainOfThoughtDevAgent + completion_max_tries: 2 + completion_remove_key_on_error: true + drop_params: true + final_max_tokens: 2500 + llm_options: + api_base: http://127.0.0.1:14000/v1 + model: gpt-4o-mini + temperature: 0.0 + tool_choice: auto + max_steps: 10 + max_tool_calls: 50 + response_format: |2- + + RESPONSE FORMAT: + + Your input may contain 'final_answer' entries, consider these answers of other agents. + For each step, provide a title that describes what you're doing in that step, along with the content. + Decide if you need another step or if you're ready to give the final answer. + Respond in JSON format with 'title', 'content', 'code', 'console', and 'next_action' (either 'continue' or 'final_answer') keys. + Make sure you send only one JSON step object. You response should be a valid JSON object. In the JSON use Use Triple Quotes for Multi-line Strings. + + USE AS MANY REASONING STEPS AS POSSIBLE. AT LEAST 3. + BE AWARE OF YOUR LIMITATIONS AS AN LLM AND WHAT YOU CAN AND CANNOT DO. + IN YOUR REASONING, INCLUDE EXPLORATION OF ALTERNATIVE ANSWERS. + CONSIDER YOU MAY BE WRONG, AND IF YOU ARE WRONG IN YOUR REASONING, WHERE IT WOULD BE. + FULLY TEST ALL OTHER POSSIBILITIES. + YOU CAN BE WRONG. WHEN YOU SAY YOU ARE RE-EXAMINING, ACTUALLY RE-EXAMINE, AND USE ANOTHER APPROACH TO DO SO. + DO NOT JUST SAY YOU ARE RE-EXAMINING. USE AT LEAST 3 METHODS TO DERIVE THE ANSWER. USE BEST PRACTICES. + + Example 1 of a valid JSON response: + ```json + { + "title": "Identifying Key Information", + "content": "To begin solving this problem, we need to carefully examine the given information and identify the crucial elements that will guide our solution process. This involves...", + "next_action": "continue" + }``` + Example 2 of a valid JSON response: + ```json + { + "title": "Code to solve the problem", + "content": "This code is expected to ... As a result the following should be produced: ...", + "code": """ + import numpy as np + ... + """, + "next_action": "final_answer" + }``` + Example 3 of a valid JSON response: + ```json + { + "title": "Code execution observations", + "content": "Code execution failed during ... , root cause of the problem likely is ..." + "code": " " + "console": """ + Traceback (most recent call last): + """, + "next_action": "final_answer" + }``` + Example 1 of INVALID response including multiple JSON objects instead of one, DO NOT do that: + ```json + { + "title": "Some thinking", + "content": "...", + "next_action": "continue" + } + { + "title": "Final thought!", + "content": "I got an answer already", + "next_action": "final_answer" + } + Example 2 of INVALID response including multiple JSON objects instead of one, DO NOT do that: + ```json + { + "title": "Some thinking", + "content": "...", + "next_action": "continue" + } + { + "title": "Some more thinking in same step", + "content": "...", + "next_action": "continue" + } + ``` + streaming_method: openai + system_prompt: |- + You are a bioinformatician AI assistant. + Your role is to help with bioinformatics tasks and generate plans or code as needed. + Please adhere to the following guidelines strictly: + 1. Always maintain your role as a bioinformatician. + 2. Note that you work in a team of agents, therefore you may see user inputs, coding plans, analysis, code outputs and such. + Focus on reasoning, analysis, and coding tasks. + 3. Explicitly specify what you want of the environment to have for this to be taken care of. 
+ Your starting environment have the following from the start: + - python=3.11 + - requests + - biopython + - scanpy<=1.10.3 + - scikit-learn<=1.5.2 + - polars>=1.11.0 + - pandas>=2.2.2 + - numpy<2.0.0,>=1.23 + - scipy<=1.14.1 + - pyarrow + - pip: + - genomepy>=0.16.1 + - pyensembl + - plotly + - GEOparse>=2.0.4 + - pybiomart + - scanpy + 4. Use information provided in the input to write detailed plans, python code or bash code to accomplish the given goal or task. + 5. Divide and conquer: If the user query is complex or include multiple components or tasks to it, compose a plan to have a modular structure, + where self-contained intermediary solutions for a single module can then be easily re-used without re-visiting the completed steps. + 6. Go for an MVP solution first and foremost. Strive to achieve at least minimal result and go forward first, before going wide and rich. + If the user query is complex by nature, include multiple components to it, rich in detail, if's, and when's: + - always start with stripping it to the core + - lay a path to the minimal sensible result in your plan. + - Follow the plan, if a hard obstacle or failure is met with one of the details, note it, but try going around first when possible. + 7. Once you have the code that is a candidate for the MVP, validate it and convey that as your final answer without delay. + 8. The code shall be executed once you are provided with the outputs, revisit the user query, the goals you set and assess whether MVP is reached. + 8. Once MVP is reached: + - Adjust the plan to gradually grow the MVP. + - Get back to the problems you met on the way and postponed, try to solve the easiest ones first. + - Iteratively extend and grow MVP, assembling the details and components you stripped during initial decomposition, in the reverse order, eventually fulfilling the query. + 7. If your code downloads data, save it in the /input directory. Also, always check if the data is already in the /input directory to avoid unnecessary downloads. + 8. When writing code: + - always generate the full code of the script with all required imports. Each time you run the code assume nothing is imported or initialized. + - Use full absolute paths for all files. Use pathlib when possible. + - Use default values for unspecified parameters. + - Only use software directly installed with micromamba or pip or present in the initial environment.yaml. + - If the method that you use require data preprocessing (like NaN deletion) or normalization, do it first. + - Always inspect the data, check which columns in the dataframes are relevant and clean them from bad or missing entries if necessary + - If your previous run failed because some field does not exist, inspect the fields and check if you confused the names + - Do not repeat steps already successfully completed in the history. + - If you download data, save it in the /input directory. Also, always check if the data is already in the /input directory to avoid unnecessary downloads. + - If you create files and folders with results save them inside /output directory unless other is specified explicitly. + - When you make plots save figures in /output directory. + - For outputs, use meaningful numbered attempts naming to avoid cases when good output from previous attempt was lost due to newly introduced bug. + - If you encounter errors related to field names in Python objects, use the dir() or similar functions to inspect the object and verify the correct field names. 
For example: print(dir(object_name)) + Compare the output with the field names you're trying to access. Correct any mismatches in your code. + Give all relevant imports at the beginning of the code. Do not assume anything imported in the global scope. + + 9. Pay attention to the number of input files and do not miss any. + 10. Be aware of file name changes or outputs from previous steps when provided with history. + 11. If you need to know facts of the environment the code operate in, communicate that in clear and instructive manner, examples: + - I need to know the listing /output/plots + - I need to have a look at the filesize, header and lines 1,2,3 and 536 of /input/some_badly_broken.csv + 12. Validate your code using provided validate_python_code_syntax before submitting. + + 13. If execution errors occur, fix the code based on the error information provided. + 14. When you are ready to give the final answer, explain the results obtained and files and folders created in the /output (if any). + 15. Examples of using GEOparse to download and process GEO data: + ```python + import GEOparse + + gse_id = 'GSE176043' + gse = GEOparse.get_GEO(geo=gse_id, destdir='./input', silent=True) + ``` + System constraints: + - You are working on an Ubuntu 24.04 system. + - You have a micromamba environment named 'base'. + - No other software is installed by default. + Remember to adapt your response based on whether you're creating an initial plan or writing code for a specific task. + Your goal is to provide accurate, efficient, and executable bioinformatics solutions. + thought_max_tokes: 5000 + tools: + validate_python_code_syntax: + auto_refresh: true + description: Validates the syntax of a Python code string. + function: validate_python_code_syntax + package: just_agents.examples.coding.tools + parameters: + properties: + code: + description: Python code to validate. + type: string + filename: + description: Filename to include in error messages for context. + type: string + required: + - code + - filename + type: object + devops_cot_agent: + append_response_format: true + class: ChainOfThoughtAgent + class_qualname: cot_dev.ChainOfThoughtDevAgent + completion_max_tries: 2 + completion_remove_key_on_error: true + drop_params: true + final_max_tokens: 2500 + llm_options: + api_base: http://127.0.0.1:14000/v1 + model: gpt-4o-mini + temperature: 0.0 + tool_choice: auto + max_steps: 25 + max_tool_calls: 50 + response_format: |2 + + RESPONSE FORMAT: + + Your input may contain 'final_answer' entries, consider these answers of other agents. + For each step, provide a title that describes what you're doing in that step, along with the content. + Decide if you need another step or if you're ready to give the final answer. + Respond in JSON format with 'title', 'content', 'code', 'console', and 'next_action' (either 'continue' or 'final_answer') keys. + Make sure you send only one JSON step object. You response should be a valid JSON object. In the JSON use Use Triple Quotes for Multi-line Strings. + + USE AS MANY REASONING STEPS AS POSSIBLE. AT LEAST 3. + BE AWARE OF YOUR LIMITATIONS AS AN LLM AND WHAT YOU CAN AND CANNOT DO. + IN YOUR REASONING, INCLUDE EXPLORATION OF ALTERNATIVE ANSWERS. + CONSIDER YOU MAY BE WRONG, AND IF YOU ARE WRONG IN YOUR REASONING, WHERE IT WOULD BE. + FULLY TEST ALL OTHER POSSIBILITIES. + YOU CAN BE WRONG. WHEN YOU SAY YOU ARE RE-EXAMINING, ACTUALLY RE-EXAMINE, AND USE ANOTHER APPROACH TO DO SO. + DO NOT JUST SAY YOU ARE RE-EXAMINING. USE AT LEAST 3 METHODS TO DERIVE THE ANSWER. 
USE BEST PRACTICES. + + Example 1 of a valid JSON response: + ```json + { + "title": "Identifying Key Information", + "content": "To begin solving this problem, we need to carefully examine the given information and identify the crucial elements that will guide our solution process. This involves...", + "next_action": "continue" + }``` + Example 2 of a valid JSON response: + ```json + { + "title": "Code to solve the problem", + "content": "This code is expected to ... As a result the following should be produced: ...", + "code": """ + import numpy as np + ... + """, + "next_action": "final_answer" + }``` + Example 3 of a valid JSON response: + ```json + { + "title": "Code execution observations", + "content": "Code execution failed during ... , root cause of the problem likely is ..." + "code": " " + "console": """ + Traceback (most recent call last): + """, + "next_action": "final_answer" + }``` + Example 1 of INVALID response including multiple JSON objects instead of one, DO NOT do that: + ```json + { + "title": "Some thinking", + "content": "...", + "next_action": "continue" + } + { + "title": "Final thought!", + "content": "I got an answer already", + "next_action": "final_answer" + } + Example 2 of INVALID response including multiple JSON objects instead of one, DO NOT do that: + ```json + { + "title": "Some thinking", + "content": "...", + "next_action": "continue" + } + { + "title": "Some more thinking in same step", + "content": "...", + "next_action": "continue" + } + ``` + streaming_method: openai + system_prompt: |- + You are a seasoned DevOps AI assistant. + Your role is to assist with coding environment configuration and setup tasks, to install packages and fix imports as needed. + Please adhere to the following guidelines strictly: + 1. Always maintain your role as a DevOps and stay focused. + Note that you work in a team of agents, therefore you may see user inputs, coding plans, analysis, code outputs and such. + Your goal is to assist the team at the step at hand purely on the environment layer, by using tools to execute bash and python code. + - You act to streamline code execution, error and output analysis in accurate, efficient, and concise manner. + - You may find that no tasks at all fit your role based on current input state. This is normal, convey that as your final answer without delay. + - You adhere to RESPONSE FORMAT. + + 2. You only take action when either plan, or code output, or context suggest that an environment modification or evaluation is needed, such as: + - specific instructions or pre-conditions related to environment in plan text or context. + - code have imports that were not previously installed: you install missing packages using pip + - new code form the agents or user: You attempt to execute the code using the tools you have after you carefully prepare the environment for it + - missing or not writable folders: you attempt mkdir or chmod + - messages about missing packages or dependency problems: you install, upgrade, downgrade or examine versions of packages using pip + - missing files: you examine filesystem state using ls outputs and such + - ENV vars missing: attempt to fix if you can + - code failures related to environment: after fixing the environment, re-run the code to see if it succeeds or fails in a new way. + - missing imports: if a coder provided you a code snippet and forgot to put imports there, append them at the beginning. + - code failures due to bug, logic, etc: try to analyze the root cause. 
+ - If the code produces some output files silently, you may probe them using 'head', 'tail' 'grep', etc. Mind the context limitations + - You read outputs, probe errorlevels, check file sizes or extract ay other meaningful information from the environment that will be of use for the other agents based on input. + - If the files downloaded are tar-ed, ziped or otherwise packed, feel free to extract them in the /input directory as necessary. + + You do not: + - You do not try to modify code logic, or fix bugs, except for the missing imports case. Instead, give minimal Root Cause Analysis in your Final Answer to assist the coder. + - You do not have root or sudo rights, act accordingly + - You do not 'cat' the entire large files, or use grep without '-m' limiters to keep the context clean, concise, meaningfull. + - You do not enable DEBUG or TRACE verbosity of stuff unless absolutely necessary for the same reason of context economy. + + 3. System constraints: + - You are working on an Ubuntu 24.04 system as user. + - You have /example, /input and /output folders mounted + - You have a micromamba environment named 'base', it is activated by default + - No other software is installed by default except basic linux tools + + Micromamba 'base' environment was created using environment.yaml file, which is: + ```yaml + name: base + channels: + - conda-forge + - bioconda + - defaults + dependencies: + - python=3.11 + - requests + - biopython + - scanpy<=1.10.3 + - scikit-learn<=1.5.2 + - polars>=1.11.0 + - pandas>=2.2.2 + - numpy<2.0.0,>=1.23 + - scipy<=1.14.1 + - pyarrow + - pip: + - genomepy>=0.16.1 + - pyensembl + - plotly + - GEOparse>=2.0.4 + ``` + + 4. Do not re-create or activate the micromamba environment 'base', it is already activate. + 5. You use 'run_bash_command' tool to install new dependencies and execute linux commands. + 6. Install dependencies and software using micromamba, pip with the -y flag. + 7. You use 'run_python_code' tool to run python code. The tool will execute it as script that is why all variables and imports created previously will not be available. + The code will be saved into /example folder as .py script and executed in the base micromamba environment. + 8. State changes to environment, e.g installed packages and exports are normally pertained, however a case is possible where the sandbox environment is reverted to clean slate described above. + In such case you would need to re-apply all the modifications from the conversation to bring the sandbox instance up to speed + 9. Be aware of file name changes or outputs from previous steps when provided with history. + 10. Use information provided in the input to write plans, python code or bash code to accomplish the given goal or task. + 11. If you have a code that was not yet executed, run it with the run_python_code tool instead of pasting it to the content or code fields of the response. + 12. If you are writing bash code, run it with the run_bash_command tool instead of pasting it to the content or code fields of the response. + 13. You are expected to mirror unmodified console outputs excerptions for further analysis into respective field of final answer. Prefer to provide a complete output. + If the output is excessively verbose and contain dozens of similar lines or repeating entries, reduce it instead, highlighting the expunged parts by + ======= output omitted ======= + 14. You are expected to supply the latest code version that was executed into respective code field. 
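+    # Illustrative note: each JSON step produced under the RESPONSE FORMAT above is parsed by
+    # ChainOfThoughtDevAgent.thought_query (cot_dev.py) into an ActionableThought defined in cot_memory.py.
+    # A rough sketch of one parsed step, with purely hypothetical field values:
+    #   ActionableThought(
+    #       agent="devops_cot_agent",
+    #       title="Code execution observations",
+    #       content="pip reported a missing dependency; re-running the script after installing it.",
+    #       code="import GEOparse\n...",
+    #       console="ModuleNotFoundError: No module named 'pybiomart'",
+    #       next_action="continue",
+    #   )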
+ thought_max_tokes: 5000 + tools: + run_bash_command: + auto_refresh: true + description: 'command: str # command to run in bash, for example install software + inside micromamba environment' + function: run_bash_command + package: just_agents.examples.coding.tools + parameters: + properties: + command: + type: string + required: + - command + type: object + run_python_code: + auto_refresh: true + description: 'code: str # python code to run in micromamba environment' + function: run_python_code + package: just_agents.examples.coding.tools + parameters: + properties: + code: + type: string + required: + - code + type: object diff --git a/examples/just_agents/examples/coding/cot_memory.py b/examples/just_agents/examples/coding/cot_memory.py new file mode 100644 index 0000000..4392d6f --- /dev/null +++ b/examples/just_agents/examples/coding/cot_memory.py @@ -0,0 +1,122 @@ +from pydantic import Field, PrivateAttr +from typing import Optional, Callable, List, Dict, Literal +from functools import singledispatchmethod +from just_agents.interfaces.memory import IMemory +from just_agents.just_serialization import JustSerializable + +from just_agents.patterns.interfaces.IThinkingAgent import IThought +from abc import ABC + + + +class ActionableThought(IThought): + """ + This is a thought object that is used to represent a thought in the chain of thought agent. + """ + + agent: Optional[str] = Field("Error", description="Agent's name") + + title: str = Field("Final Thought", description="Represents the title/summary of the current thinking step") + content: str = Field(..., description="The detailed explanation/reasoning for this thought step") + next_action: Literal["continue", "final_answer"] = Field(default="final_answer", description="Indicates whether to continue thinking or provide final answer") + + code: Optional[str] = Field(None, description="Optional field containing script code") + console: Optional[str] = Field(None, description="Optional field containing console outputs") + + def is_final(self) -> bool: + # Helper method to check if this is the final thought in the chain + return self.next_action == "final_answer" + +OnThoughtCallable = Callable[[ActionableThought], None] + +class IBaseThoughtMemory(JustSerializable, IMemory[str, ActionableThought], ABC): + """ + Abstract Base Class to fulfill Pydantic schema requirements for concrete-attributes. + """ + + messages: List[ActionableThought] = Field(default_factory=list) + + # Private dict of message handlers for each role + _on_message: Dict[str, List[OnThoughtCallable]] = PrivateAttr(default_factory=dict) + + def deepcopy(self) -> 'IBaseThoughtMemory': + return self.model_copy(deep=True) + + +class BaseThoughtMemory(IBaseThoughtMemory): + """ + The Memory class provides storage and handling of messages for a language model session. + It supports adding, removing, and handling different types of messages and + function calls categorized by roles: assistant, tool, user, and system. + """ + + def handle_message(self, message: ActionableThought) -> None: + """ + Implements the abstract method to handle messages based on their roles. 
+        """
+        if hasattr(message, "agent") and message.agent is not None:
+            name: str = message.agent
+        else:
+            name: str = "Error"
+        for handler in self._on_message.get(name, []):
+            handler(message)
+
+    # Overriding add_message with specific implementations
+    @singledispatchmethod
+    def add_message(self, message: ActionableThought) -> None:
+        """
+        Overrides the abstract method and provides dispatching to specific handlers.
+        """
+        raise TypeError(f"Unsupported message format: {type(message)}")
+
+    @add_message.register
+    def _add_actionable_thought(self, message: ActionableThought) -> None:
+        """
+        Handles ActionableThought instances.
+        """
+        self.messages.append(message)
+        self.handle_message(message)
+
+    @add_message.register
+    def _add_thought(self, message: IThought) -> None:
+        """
+        Handles generic IThought instances by re-instantiating them as ActionableThought.
+        """
+        thought = ActionableThought.model_validate(message.model_dump()) #reinstantiate
+        self.messages.append(thought)
+        self.handle_message(thought)
+
+    @add_message.register
+    def _add_dict_message(self, message: dict) -> None:
+        """
+        Handles dict payloads by validating them into ActionableThought.
+        """
+        thought = ActionableThought.model_validate(message)
+        self.messages.append(thought)
+        self.handle_message(thought)
+
+    @add_message.register
+    def _add_message_list(self, messages: list) -> None:
+        """
+        Handles lists of messages.
+        """
+        self.add_messages(messages)
+
+    @property
+    def last_message_str(self) -> Optional[str]:
+        last_message = self.last_message
+        if last_message:
+            return last_message.content
+        else:
+            return None
+
+    @property
+    def final_thoughts(self) -> Optional[List[ActionableThought]]:
+        # Collect final thoughts from the stored messages
+        thoughts = [thought for thought in self.messages if thought.next_action == "final_answer"]
+        if thoughts:
+            return thoughts
+        else:
+            return None
+
+
+
diff --git a/examples/just_agents/examples/coding/devops_agent.yaml b/examples/just_agents/examples/coding/devops_agent.yaml
new file mode 100644
index 0000000..2bc4325
--- /dev/null
+++ b/examples/just_agents/examples/coding/devops_agent.yaml
@@ -0,0 +1,92 @@
+class: "ChainOfThoughtAgent"
+system_prompt: |
+  You are a seasoned DevOps AI assistant.
+  Your role is to assist with coding environment configuration and setup tasks, to install packages and fix imports as needed.
+  Please adhere to the following guidelines strictly:
+  1. Always maintain your role as a DevOps and stay focused.
+  Note that you work in a team of agents, therefore you may see user inputs, coding plans, analysis, code outputs and such.
+  Your goal is to assist the team at the step at hand purely on the environment layer, by using tools to execute bash and python code.
+  - You act to streamline code execution, error and output analysis in accurate, efficient, and concise manner.
+  - You may find that no tasks at all fit your role based on current input state. This is normal, convey that as your final answer without delay.
+  - You adhere to RESPONSE FORMAT.
+
+  2. You only take action when either plan, or code output, or context suggest that an environment modification or evaluation is needed, such as:
+  - specific instructions or pre-conditions related to environment in plan text or context.
diff --git a/examples/just_agents/examples/coding/devops_agent.yaml b/examples/just_agents/examples/coding/devops_agent.yaml
new file mode 100644
index 0000000..2bc4325
--- /dev/null
+++ b/examples/just_agents/examples/coding/devops_agent.yaml
@@ -0,0 +1,92 @@
+class: "ChainOfThoughtAgent"
+system_prompt: |
+  You are a seasoned DevOps AI assistant.
+  Your role is to assist with coding environment configuration and setup tasks, to install packages and fix imports as needed.
+  Please adhere to the following guidelines strictly:
+  1. Always maintain your role as a DevOps assistant and stay focused.
+  Note that you work in a team of agents, therefore you may see user inputs, coding plans, analysis, code outputs and such.
+  Your goal is to assist the team at the step at hand purely on the environment layer, by using tools to execute bash and python code.
+  - You act to streamline code execution, error and output analysis in an accurate, efficient, and concise manner.
+  - You may find that no tasks at all fit your role based on the current input state. This is normal, convey that as your final answer without delay.
+  - You adhere to RESPONSE FORMAT.
+
+  2. You only take action when either the plan, the code output, or the context suggests that an environment modification or evaluation is needed, such as:
+  - specific instructions or pre-conditions related to the environment in the plan text or context.
+  - code has imports that were not previously installed: you install missing packages using pip
+  - new code from the agents or user: you attempt to execute the code using the tools you have after you carefully prepare the environment for it
+  - missing or not writable folders: you attempt mkdir or chmod
+  - messages about missing packages or dependency problems: you install, upgrade, downgrade or examine versions of packages using pip
+  - missing files: you examine the filesystem state using ls outputs and such
+  - ENV vars missing: attempt to fix if you can
+  - code failures related to the environment: after fixing the environment, re-run the code to see if it succeeds or fails in a new way.
+  - missing imports: if a coder provided you a code snippet and forgot to put imports there, append them at the beginning.
+  - code failures due to bugs, logic, etc.: try to analyze the root cause.
+  - If the code produces some output files silently, you may probe them using 'head', 'tail', 'grep', etc. Mind the context limitations.
+  - You read outputs, probe error levels, check file sizes or extract any other meaningful information from the environment that will be of use for the other agents based on the input.
+  - If the downloaded files are tar-ed, zipped or otherwise packed, feel free to extract them in the /input directory as necessary.
+
+  You do not:
+  - You do not try to modify code logic, or fix bugs, except for the missing imports case. Instead, give a minimal Root Cause Analysis in your Final Answer to assist the coder.
+  - You do not have root or sudo rights, act accordingly.
+  - You do not 'cat' entire large files, or use grep without '-m' limiters, to keep the context clean, concise, meaningful.
+  - You do not enable DEBUG or TRACE verbosity unless absolutely necessary, for the same reason of context economy.
+
+  3. System constraints:
+  - You are working on an Ubuntu 24.04 system as a regular user.
+  - You have /example, /input and /output folders mounted.
+  - You have a micromamba environment named 'base', it is activated by default.
+  - No other software is installed by default except basic linux tools.
+
+  Micromamba 'base' environment was created using an environment.yaml file, which is:
+  ```yaml
+  name: base
+  channels:
+    - conda-forge
+    - bioconda
+    - defaults
+  dependencies:
+    - python=3.11
+    - requests
+    - biopython
+    - scanpy<=1.10.3
+    - scikit-learn<=1.5.2
+    - polars>=1.11.0
+    - pandas>=2.2.2
+    - numpy<2.0.0,>=1.23
+    - scipy<=1.14.1
+    - pyarrow
+    - pip:
+      - genomepy>=0.16.1
+      - pyensembl
+      - plotly
+      - GEOparse>=2.0.4
+  ```
+
+  4. Do not re-create or activate the micromamba environment 'base', it is already activated.
+  5. You use the 'run_bash_command' tool to install new dependencies and execute linux commands.
+  6. Install dependencies and software using micromamba, pip with the -y flag.
+  7. You use the 'run_python_code' tool to run python code. The tool will execute it as a script, which is why variables and imports created previously will not be available.
+  The code will be saved into the /example folder as a .py script and executed in the base micromamba environment.
+  8. State changes to the environment, e.g. installed packages and exports, are normally persisted, however a case is possible where the sandbox environment is reverted to the clean slate described above.
+  In such a case you would need to re-apply all the modifications from the conversation to bring the sandbox instance up to speed.
+  9. Be aware of file name changes or outputs from previous steps when provided with history.
+  10. Use information provided in the input to write plans, python code or bash code to accomplish the given goal or task.
+  11. If you have code that was not yet executed, run it with the run_python_code tool instead of pasting it into the content or code fields of the response.
+  12. If you are writing bash code, run it with the run_bash_command tool instead of pasting it into the content or code fields of the response.
+  13. You are expected to mirror unmodified console output excerpts for further analysis into the respective field of the final answer. Prefer to provide a complete output.
+  If the output is excessively verbose and contains dozens of similar lines or repeating entries, reduce it instead, highlighting the expunged parts by
+  ======= output omitted =======
+  14. You are expected to supply the latest code version that was executed into the respective code field.
+
+thought_max_tokes: 5000
+max_steps: 25
+final_max_tokens: 2500
+tools:
+  - package: "just_agents.examples.coding.tools"
+    function: "run_bash_command"
+  - package: "just_agents.examples.coding.tools"
+    function: "run_python_code"
+options:
+  model: "gpt-4o-mini"
+  temperature: 0.0
+  api_base: "http://127.0.0.1:14000/v1"
\ No newline at end of file
diff --git a/examples/just_agents/examples/coding/tools.py b/examples/just_agents/examples/coding/tools.py
index 0802d0f..716617b 100644
--- a/examples/just_agents/examples/coding/tools.py
+++ b/examples/just_agents/examples/coding/tools.py
@@ -1,17 +1,80 @@
 import re
-from dotenv import load_dotenv
+from just_agents.just_bus import JustEventBus
+
 import requests
-from just_agents.core.interfaces.IAgent import IAgent
-from just_agents.simple.utils import build_agent
-from just_agents.simple.llm_session import LLMSession
+
 from llm_sandbox.micromamba import MicromambaSession
-from llm_sandbox.docker import SandboxDockerSession
+
 from pathlib import Path
-from mounts import make_mounts, input_dir, output_dir
+from just_agents.examples.coding.mounts import make_mounts, input_dir, output_dir
 
 """
 Tools for running code in sandboxed environment that also mounts input and output directories.
 """
+CODE_OK : str = "Code syntax is correct"
+
+##HELPER TOOLS##
+
+def submit_code(code: str, filename: str)-> str:
+    """
+    Validates the syntax of a Python code string and submits the code for future processing if correct.
+
+    Attempts to compile the provided code to check for syntax errors. Code is not executed at this step.
+    Returns a success message if valid or an error message with details if invalid.
+
+    Parameters
+    ----------
+    code : str
+        Python code to validate.
+    filename : str
+        Filename to include in error messages for context.
+
+    Returns
+    -------
+    str
+        'Code syntax is correct' if valid, or an error message if syntax errors are found.
+    """
+    result = validate_python_code_syntax(code, filename)
+    # Publish the validation result
+    event_bus = JustEventBus()
+    event_bus.publish("submit_code", code, filename, result)
+    return result
+
+def submit_console_output(output: str, append :bool = True)-> bool:
+    """
+    Submits console output for further recording and analysis.
+
+    Parameters
+    ----------
+    output : str
+        Console output to submit.
+    append : bool
+        Whether to append the output to previously submitted records.
+
+    Returns
+    -------
+    bool
+        True denotes successful submission.
+ """ + # Publish the validation result + try: + event_bus = JustEventBus() + event_bus.publish("submit_console_output", output, append) + except Exception as e: + return False + return True + +def validate_python_code_syntax(code: str, filename: str)-> str: + """ + Validates the syntax of a Python code string. + """ + try: + # Compile the code string to check for syntax errors + compiled_code = compile(code, f"/example/{filename}", "exec") + return (CODE_OK) + except SyntaxError as e: + return (f"Syntax error in code: {e}") + def download_file(source_url: str, file_name: str) -> bool: """ Download file from source_url and save it to '/input' folder with file_name that available mount for runtime. """ @@ -73,9 +136,9 @@ def run_bash_command(command: str) -> str: """ mounts = make_mounts() try: - with MicromambaSession(image="ghcr.io/longevity-genie/just-agents/biosandbox:main", - lang="python", - keep_template=True, + with MicromambaSession(image="ghcr.io/longevity-genie/just-agents/biosandbox:main", + lang="python", + keep_template=True, verbose=True, mounts=mounts) as session: result = session.execute_command(command=command) @@ -91,9 +154,9 @@ def run_python_code(code: str) -> str: """ mounts = make_mounts() try: - with MicromambaSession(image="ghcr.io/longevity-genie/just-agents/biosandbox:main", - lang="python", - keep_template=True, + with MicromambaSession(image="ghcr.io/longevity-genie/just-agents/biosandbox:main", + lang="python", + keep_template=True, verbose=True, mounts=mounts) as session: result = session.run(code) diff --git a/examples/just_agents/examples/coding/webscrapper.py b/examples/just_agents/examples/coding/webscrapper.py index 0b61ccf..0c7a0b1 100644 --- a/examples/just_agents/examples/coding/webscrapper.py +++ b/examples/just_agents/examples/coding/webscrapper.py @@ -2,8 +2,7 @@ from dotenv import load_dotenv -from examples.coding.mounts import make_mounts, input_dir, output_dir, coding_examples_dir -from just_agents.core.interfaces.IAgent import IAgent +from just_agents.interfaces.agent import IAgent from just_agents.simple.utils import build_agent from just_agents.simple.llm_session import LLMSession from examples.coding.tools import write_thoughts_and_results, amino_match_endswith diff --git a/examples/just_agents/examples/web/nice_web.py b/examples/just_agents/examples/web/nice_web.py index 2844a1e..27424bc 100644 --- a/examples/just_agents/examples/web/nice_web.py +++ b/examples/just_agents/examples/web/nice_web.py @@ -1,6 +1,6 @@ from pathlib import Path from dotenv import load_dotenv -from just_agents.core.interfaces.IAgent import IAgent +from just_agents.interfaces.agent import IAgent from just_agents.simple.utils import build_agent from just_agents_web.web import create_app, run_server diff --git a/poetry.lock b/poetry.lock index 1bce6f6..892dd8d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,87 +13,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.11.10" +version = "3.11.11" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" files = [ - {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"}, - {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"}, - {file = "aiohttp-3.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4"}, - {file = 
"aiohttp-3.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d"}, - {file = "aiohttp-3.11.10-cp310-cp310-win32.whl", hash = "sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91"}, - {file = "aiohttp-3.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33"}, - {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b"}, - {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1"}, - {file = "aiohttp-3.11.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838"}, - {file = 
"aiohttp-3.11.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3"}, - {file = "aiohttp-3.11.10-cp311-cp311-win32.whl", hash = "sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4"}, - {file = "aiohttp-3.11.10-cp311-cp311-win_amd64.whl", hash = "sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec"}, - {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf"}, - {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138"}, - {file = "aiohttp-3.11.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc"}, - {file = "aiohttp-3.11.10-cp312-cp312-win32.whl", hash = "sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985"}, - {file = "aiohttp-3.11.10-cp312-cp312-win_amd64.whl", hash = "sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408"}, - {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816"}, - {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf"}, - {file = "aiohttp-3.11.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5"}, - {file = 
"aiohttp-3.11.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836"}, - {file = "aiohttp-3.11.10-cp313-cp313-win32.whl", hash = "sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c"}, - {file = "aiohttp-3.11.10-cp313-cp313-win_amd64.whl", hash = "sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6"}, - {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf"}, - {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f"}, - {file = "aiohttp-3.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c"}, - {file = 
"aiohttp-3.11.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4"}, - {file = "aiohttp-3.11.10-cp39-cp39-win32.whl", hash = "sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be"}, - {file = "aiohttp-3.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74"}, - {file = "aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c"}, + {file = "aiohttp-3.11.11-cp310-cp310-win32.whl", hash = "sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745"}, + {file = "aiohttp-3.11.11-cp310-cp310-win_amd64.whl", hash = "sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773"}, + {file = "aiohttp-3.11.11-cp311-cp311-win32.whl", hash = "sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62"}, + {file = "aiohttp-3.11.11-cp311-cp311-win_amd64.whl", hash = "sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e"}, + {file = "aiohttp-3.11.11-cp312-cp312-win32.whl", hash = "sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600"}, + {file = "aiohttp-3.11.11-cp312-cp312-win_amd64.whl", hash = "sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5"}, + {file = "aiohttp-3.11.11-cp313-cp313-win32.whl", hash = "sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d"}, + {file = "aiohttp-3.11.11-cp313-cp313-win_amd64.whl", hash = "sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3e23419d832d969f659c208557de4a123e30a10d26e1e14b73431d3c13444c2e"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21fef42317cf02e05d3b09c028712e1d73a9606f02467fd803f7c1f39cc59add"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:1f21bb8d0235fc10c09ce1d11ffbd40fc50d3f08a89e4cf3a0c503dc2562247a"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1642eceeaa5ab6c9b6dfeaaa626ae314d808188ab23ae196a34c9d97efb68350"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2170816e34e10f2fd120f603e951630f8a112e1be3b60963a1f159f5699059a6"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8be8508d110d93061197fd2d6a74f7401f73b6d12f8822bbcd6d74f2b55d71b1"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eed954b161e6b9b65f6be446ed448ed3921763cc432053ceb606f89d793927e"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6c9af134da4bc9b3bd3e6a70072509f295d10ee60c697826225b60b9959acdd"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44167fc6a763d534a6908bdb2592269b4bf30a03239bcb1654781adf5e49caf1"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:479b8c6ebd12aedfe64563b85920525d05d394b85f166b7873c8bde6da612f9c"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:10b4ff0ad793d98605958089fabfa350e8e62bd5d40aa65cdc69d6785859f94e"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b540bd67cfb54e6f0865ceccd9979687210d7ed1a1cc8c01f8e67e2f1e883d28"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dac54e8ce2ed83b1f6b1a54005c87dfed139cf3f777fdc8afc76e7841101226"}, + {file = "aiohttp-3.11.11-cp39-cp39-win32.whl", hash = "sha256:568c1236b2fde93b7720f95a890741854c1200fba4a3471ff48b2934d2d93fd3"}, + {file = "aiohttp-3.11.11-cp39-cp39-win_amd64.whl", hash = "sha256:943a8b052e54dfd6439fd7989f67fc6a7f2138d0a2cf0a7de5f18aa4fe7eb3b1"}, + {file = "aiohttp-3.11.11.tar.gz", hash = "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e"}, ] [package.dependencies] @@ -111,13 +111,13 @@ speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiosignal" -version = "1.3.1" +version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [package.dependencies] @@ -180,19 +180,19 @@ files = [ [[package]] name = "attrs" -version = "24.2.0" +version = "24.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash 
= "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] @@ -213,138 +213,125 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - 
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - 
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -553,13 +540,13 @@ files = [ [[package]] name = "fsspec" -version = "2024.10.0" +version = "2024.12.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871"}, - {file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"}, + {file = "fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2"}, + {file = "fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f"}, ] [package.extras] @@ -649,13 +636,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "huggingface-hub" -version = "0.26.5" +version = "0.27.0" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.26.5-py3-none-any.whl", hash = "sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924"}, - {file = "huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b"}, + {file = "huggingface_hub-0.27.0-py3-none-any.whl", hash = "sha256:8f2e834517f1f1ddf1ecc716f91b120d7333011b7485f665a9a412eacb1a2a81"}, + {file = "huggingface_hub-0.27.0.tar.gz", hash = "sha256:902cce1a1be5739f5589e560198a65a8edcfd3b830b1666f36e4b961f0454fac"}, ] [package.dependencies] @@ -742,13 +729,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -759,86 +746,87 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.8.0" +version = "0.8.2" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dee4eeb293ffcd2c3b31ebab684dbf7f7b71fe198f8eddcdf3a042cc6e10205a"}, - {file = "jiter-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aad1e6e9b01cf0304dcee14db03e92e0073287a6297caf5caf2e9dbfea16a924"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:504099fb7acdbe763e10690d560a25d4aee03d918d6a063f3a761d8a09fb833f"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2373487caad7fe39581f588ab5c9262fc1ade078d448626fec93f4ffba528858"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c341ecc3f9bccde952898b0c97c24f75b84b56a7e2f8bbc7c8e38cab0875a027"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e48e7a336529b9419d299b70c358d4ebf99b8f4b847ed3f1000ec9f320e8c0c"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ee157a8afd2943be690db679f82fafb8d347a8342e8b9c34863de30c538d55"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7dceae3549b80087f913aad4acc2a7c1e0ab7cb983effd78bdc9c41cabdcf18"}, - {file = "jiter-0.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e29e9ecce53d396772590438214cac4ab89776f5e60bd30601f1050b34464019"}, - {file = "jiter-0.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fa1782f22d5f92c620153133f35a9a395d3f3823374bceddd3e7032e2fdfa0b1"}, - {file = "jiter-0.8.0-cp310-none-win32.whl", hash = "sha256:f754ef13b4e4f67a3bf59fe974ef4342523801c48bf422f720bd37a02a360584"}, - {file = "jiter-0.8.0-cp310-none-win_amd64.whl", hash = "sha256:796f750b65f5d605f5e7acaccc6b051675e60c41d7ac3eab40dbd7b5b81a290f"}, - {file = "jiter-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f6f4e645efd96b4690b9b6091dbd4e0fa2885ba5c57a0305c1916b75b4f30ff6"}, - {file = "jiter-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f61cf6d93c1ade9b8245c9f14b7900feadb0b7899dbe4aa8de268b705647df81"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0396bc5cb1309c6dab085e70bb3913cdd92218315e47b44afe9eace68ee8adaa"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62d0e42ec5dc772bd8554a304358220be5d97d721c4648b23f3a9c01ccc2cb26"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec4b711989860705733fc59fb8c41b2def97041cea656b37cf6c8ea8dee1c3f4"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:859cc35bf304ab066d88f10a44a3251a9cd057fb11ec23e00be22206db878f4f"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5000195921aa293b39b9b5bc959d7fa658e7f18f938c0e52732da8e3cc70a278"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36050284c0abde57aba34964d3920f3d6228211b65df7187059bb7c7f143759a"}, - {file = "jiter-0.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a88f608e050cfe45c48d771e86ecdbf5258314c883c986d4217cc79e1fb5f689"}, - {file = "jiter-0.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:646cf4237665b2e13b4159d8f26d53f59bc9f2e6e135e3a508a2e5dd26d978c6"}, - {file = "jiter-0.8.0-cp311-none-win32.whl", hash = "sha256:21fe5b8345db1b3023052b2ade9bb4d369417827242892051244af8fae8ba231"}, - {file = "jiter-0.8.0-cp311-none-win_amd64.whl", hash = "sha256:30c2161c5493acf6b6c3c909973fb64ae863747def01cc7574f3954e0a15042c"}, - {file = "jiter-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d91a52d8f49ada2672a4b808a0c5c25d28f320a2c9ca690e30ebd561eb5a1002"}, - {file = "jiter-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c38cf25cf7862f61410b7a49684d34eb3b5bcbd7ddaf4773eea40e0bd43de706"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6189beb5c4b3117624be6b2e84545cff7611f5855d02de2d06ff68e316182be"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e13fa849c0e30643554add089983caa82f027d69fad8f50acadcb21c462244ab"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7765ca159d0a58e8e0f8ca972cd6d26a33bc97b4480d0d2309856763807cd28"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b0befe7c6e9fc867d5bed21bab0131dfe27d1fa5cd52ba2bced67da33730b7d"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d6363d4c6f1052b1d8b494eb9a72667c3ef5f80ebacfe18712728e85327000"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a873e57009863eeac3e3969e4653f07031d6270d037d6224415074ac17e5505c"}, - {file = "jiter-0.8.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2582912473c0d9940791479fe1bf2976a34f212eb8e0a82ee9e645ac275c5d16"}, - {file = "jiter-0.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:646163201af42f55393ee6e8f6136b8df488253a6533f4230a64242ecbfe6048"}, - {file = "jiter-0.8.0-cp312-none-win32.whl", hash = "sha256:96e75c9abfbf7387cba89a324d2356d86d8897ac58c956017d062ad510832dae"}, - {file = "jiter-0.8.0-cp312-none-win_amd64.whl", hash = "sha256:ed6074552b4a32e047b52dad5ab497223721efbd0e9efe68c67749f094a092f7"}, - {file = "jiter-0.8.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:dd5e351cb9b3e676ec3360a85ea96def515ad2b83c8ae3a251ce84985a2c9a6f"}, - {file = "jiter-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ba9f12b0f801ecd5ed0cec29041dc425d1050922b434314c592fc30d51022467"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7ba461c3681728d556392e8ae56fb44a550155a24905f01982317b367c21dd4"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3a15ed47ab09576db560dbc5c2c5a64477535beb056cd7d997d5dd0f2798770e"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:cef55042816d0737142b0ec056c0356a5f681fb8d6aa8499b158e87098f4c6f8"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:549f170215adeb5e866f10617c3d019d8eb4e6d4e3c6b724b3b8c056514a3487"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f867edeb279d22020877640d2ea728de5817378c60a51be8af731a8a8f525306"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aef8845f463093799db4464cee2aa59d61aa8edcb3762aaa4aacbec3f478c929"}, - {file = "jiter-0.8.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:d0d6e22e4062c3d3c1bf3594baa2f67fc9dcdda8275abad99e468e0c6540bc54"}, - {file = "jiter-0.8.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:079e62e64696241ac3f408e337aaac09137ed760ccf2b72b1094b48745c13641"}, - {file = "jiter-0.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74d2b56ed3da5760544df53b5f5c39782e68efb64dc3aa0bba4cc08815e6fae8"}, - {file = "jiter-0.8.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:798dafe108cba58a7bb0a50d4d5971f98bb7f3c974e1373e750de6eb21c1a329"}, - {file = "jiter-0.8.0-cp313-none-win32.whl", hash = "sha256:ca6d3064dfc743eb0d3d7539d89d4ba886957c717567adc72744341c1e3573c9"}, - {file = "jiter-0.8.0-cp313-none-win_amd64.whl", hash = "sha256:38caedda64fe1f04b06d7011fc15e86b3b837ed5088657bf778656551e3cd8f9"}, - {file = "jiter-0.8.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:bb5c8a0a8d081c338db22e5b8d53a89a121790569cbb85f7d3cfb1fe0fbe9836"}, - {file = "jiter-0.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:202dbe8970bfb166fab950eaab8f829c505730a0b33cc5e1cfb0a1c9dd56b2f9"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9046812e5671fdcfb9ae02881fff1f6a14d484b7e8b3316179a372cdfa1e8026"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6ac56425023e52d65150918ae25480d0a1ce2a6bf5ea2097f66a2cc50f6d692"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dfcf97210c6eab9d2a1c6af15dd39e1d5154b96a7145d0a97fa1df865b7b834"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4e3c8444d418686f78c9a547b9b90031faf72a0a1a46bfec7fb31edbd889c0d"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6507011a299b7f578559084256405a8428875540d8d13530e00b688e41b09493"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0aae4738eafdd34f0f25c2d3668ce9e8fa0d7cb75a2efae543c9a69aebc37323"}, - {file = "jiter-0.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5d782e790396b13f2a7b36bdcaa3736a33293bdda80a4bf1a3ce0cd5ef9f15"}, - {file = "jiter-0.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc7f993bc2c4e03015445adbb16790c303282fce2e8d9dc3a3905b1d40e50564"}, - {file = "jiter-0.8.0-cp38-none-win32.whl", hash = "sha256:d4a8a6eda018a991fa58ef707dd51524055d11f5acb2f516d70b1be1d15ab39c"}, - {file = "jiter-0.8.0-cp38-none-win_amd64.whl", hash = "sha256:4cca948a3eda8ea24ed98acb0ee19dc755b6ad2e570ec85e1527d5167f91ff67"}, - {file = "jiter-0.8.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ef89663678d8257063ce7c00d94638e05bd72f662c5e1eb0e07a172e6c1a9a9f"}, - {file = "jiter-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c402ddcba90b4cc71db3216e8330f4db36e0da2c78cf1d8a9c3ed8f272602a94"}, - {file = 
"jiter-0.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a6dfe795b7a173a9f8ba7421cdd92193d60c1c973bbc50dc3758a9ad0fa5eb6"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ec29a31b9abd6be39453a2c45da067138a3005d65d2c0507c530e0f1fdcd9a4"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a488f8c54bddc3ddefaf3bfd6de4a52c97fc265d77bc2dcc6ee540c17e8c342"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aeb5561adf4d26ca0d01b5811b4d7b56a8986699a473d700757b4758ef787883"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab961858d7ad13132328517d29f121ae1b2d94502191d6bcf96bddcc8bb5d1c"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a207e718d114d23acf0850a2174d290f42763d955030d9924ffa4227dbd0018f"}, - {file = "jiter-0.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:733bc9dc8ff718a0ae4695239e9268eb93e88b73b367dfac3ec227d8ce2f1e77"}, - {file = "jiter-0.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1ec27299e22d05e13a06e460bf7f75f26f9aaa0e0fb7d060f40e88df1d81faa"}, - {file = "jiter-0.8.0-cp39-none-win32.whl", hash = "sha256:e8dbfcb46553e6661d3fc1f33831598fcddf73d0f67834bce9fc3e9ebfe5c439"}, - {file = "jiter-0.8.0-cp39-none-win_amd64.whl", hash = "sha256:af2ce2487b3a93747e2cb5150081d4ae1e5874fce5924fc1a12e9e768e489ad8"}, - {file = "jiter-0.8.0.tar.gz", hash = "sha256:86fee98b569d4cc511ff2e3ec131354fafebd9348a487549c31ad371ae730310"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, + {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, + {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, + {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, + {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, + {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, + {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, + {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, + {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, + {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, + {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, + {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, + {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, + {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, + {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, + {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, + {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, ] [[package]] @@ -988,13 +976,13 @@ url = "web" [[package]] name = "litellm" -version = "1.53.9" +version = "1.56.4" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.53.9-py3-none-any.whl", hash = "sha256:509223a70b6a17c7047813ab41350283d99aeaf83586a3ed7a5e0dc36aa7fd2e"}, - {file = "litellm-1.53.9.tar.gz", hash = "sha256:30f3e920e795f329fd0f02287f9b17e3f46c5e59bbdf69356e0e3ec39e210019"}, + {file = "litellm-1.56.4-py3-none-any.whl", hash = "sha256:699a8db46f7de045069a77c435e13244b5fdaf5df1c8cb5e6ad675ef7e104ccd"}, + {file = "litellm-1.56.4.tar.gz", hash = "sha256:2808ca21878d200f7676a3d11e5bf2b5e3349ae504628f279cd7297c7dbd2038"}, ] [package.dependencies] @@ -1007,13 +995,12 @@ jsonschema = ">=4.22.0,<5.0.0" openai = ">=1.55.3" pydantic = ">=2.0.0,<3.0.0" python-dotenv = ">=0.2.0" -requests = ">=2.31.0,<3.0.0" tiktoken = 
">=0.7.0" tokenizers = "*" [package.extras] extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"] -proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.111.0,<0.112.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] +proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=43.0.1,<44.0.0)", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] [[package]] name = "llm-sandbox" @@ -1292,13 +1279,13 @@ test = ["matplotlib", "pytest", "pytest-cov"] [[package]] name = "openai" -version = "1.57.0" +version = "1.58.1" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" files = [ - {file = "openai-1.57.0-py3-none-any.whl", hash = "sha256:972e36960b821797952da3dc4532f486c28e28a2a332d7d0c5407f242e9d9c39"}, - {file = "openai-1.57.0.tar.gz", hash = "sha256:76f91971c4bdbd78380c9970581075e0337b5d497c2fbf7b5255078f4b31abf9"}, + {file = "openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c"}, + {file = "openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973"}, ] [package.dependencies] @@ -1313,6 +1300,7 @@ typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +realtime = ["websockets (>=13,<15)"] [[package]] name = "packaging" @@ -1433,18 +1421,18 @@ files = [ [[package]] name = "pydantic" -version = "2.10.3" +version = "2.10.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, - {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, + {file = "pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d"}, + {file = "pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.1" +pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" [package.extras] @@ -1453,111 +1441,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = 
"pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = 
"pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file 
= "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = 
"pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, 
+ {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ 
-2340,13 +2328,13 @@ files = [
 
 [[package]]
 name = "urllib3"
-version = "2.2.3"
+version = "2.3.0"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
-    {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+    {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
+    {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
 ]
 
 [package.extras]
diff --git a/web/just_agents/web/web.py b/web/just_agents/web/web.py
index bd31a71..31fb9c8 100644
--- a/web/just_agents/web/web.py
+++ b/web/just_agents/web/web.py
@@ -1,7 +1,7 @@
 import time
 from pathlib import Path
-from fastapi import FastAPI, Request
-from just_agents.core.interfaces.IAgent import IAgent
+from fastapi import FastAPI
+from just_agents.interfaces.agent import IAgent
 from just_agents.simple.utils import build_agent
 from starlette.responses import StreamingResponse
 from dotenv import load_dotenv