From 824f584185c62d4a1c898fb691800cbfb83fb126 Mon Sep 17 00:00:00 2001
From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com>
Date: Tue, 19 Nov 2024 12:15:14 +0100
Subject: [PATCH 01/42] swarm - feat: add image generation interface and base implementation

---
 pkgs/core/swarmauri_core/ComponentBase.py     |   1 +
 .../swarmauri_core/image_gens/IGenImage.py    |  36 +++
 .../swarmauri_core/image_gens/__init__.py     |   0
 .../swarmauri/swarmauri/image_gens/__init_.py |   0
 .../swarmauri/image_gens/base/ImageGenBase.py |  51 ++++
 .../swarmauri/image_gens/base/__init__.py     |   0
 .../concrete/BlackForestImgGenModel.py        | 260 ++++++++++++++++++
 .../swarmauri/image_gens/concrete/__init__.py |   0
 .../BlackForestImgGenModel_unit_test.py       | 120 ++++++++
 9 files changed, 468 insertions(+)
 create mode 100644 pkgs/core/swarmauri_core/image_gens/IGenImage.py
 create mode 100644 pkgs/core/swarmauri_core/image_gens/__init__.py
 create mode 100644 pkgs/swarmauri/swarmauri/image_gens/__init_.py
 create mode 100644 pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py
 create mode 100644 pkgs/swarmauri/swarmauri/image_gens/base/__init__.py
 create mode 100644 pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py
 create mode 100644 pkgs/swarmauri/swarmauri/image_gens/concrete/__init__.py
 create mode 100644 pkgs/swarmauri/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py

diff --git a/pkgs/core/swarmauri_core/ComponentBase.py b/pkgs/core/swarmauri_core/ComponentBase.py
index 9f3e86e9f..4336b2047 100644
--- a/pkgs/core/swarmauri_core/ComponentBase.py
+++ b/pkgs/core/swarmauri_core/ComponentBase.py
@@ -34,6 +34,7 @@ class ResourceTypes(Enum):
     DOCUMENT = "Document"
     EMBEDDING = "Embedding"
     EXCEPTION = "Exception"
+    IMAGE_GEN = "ImageGen"
     LLM = "LLM"
     MESSAGE = "Message"
     MEASUREMENT = "Measurement"
diff --git a/pkgs/core/swarmauri_core/image_gens/IGenImage.py b/pkgs/core/swarmauri_core/image_gens/IGenImage.py
new file mode 100644
index 000000000..79cebf615
--- /dev/null
+++ b/pkgs/core/swarmauri_core/image_gens/IGenImage.py
@@ -0,0 +1,36 @@
+from abc import ABC, abstractmethod
+from typing import Any
+
+
+class IGenImage(ABC):
+    """
+    Interface defining the operations an image-generation model must support.
+    """
+
+    @abstractmethod
+    def generate_image(self, *args, **kwargs) -> Any:
+        """
+        Generate an image synchronously from the provided input data.
+        """
+        pass
+
+    @abstractmethod
+    async def agenerate_image(self, *args, **kwargs) -> Any:
+        """
+        Generate an image asynchronously from the provided input data.
+        """
+        pass
+
+    @abstractmethod
+    def batch_generate(self, *args, **kwargs) -> Any:
+        """
+        Generate images synchronously for a batch of inputs.
+        """
+        pass
+
+    @abstractmethod
+    async def abatch_generate(self, *args, **kwargs) -> Any:
+        """
+        Generate images asynchronously for a batch of inputs.
+        """
+        pass
+ """ + pass diff --git a/pkgs/core/swarmauri_core/image_gens/__init__.py b/pkgs/core/swarmauri_core/image_gens/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/swarmauri/swarmauri/image_gens/__init_.py b/pkgs/swarmauri/swarmauri/image_gens/__init_.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py b/pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py new file mode 100644 index 000000000..e9fbda41e --- /dev/null +++ b/pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py @@ -0,0 +1,51 @@ +from abc import abstractmethod +from typing import Optional, List, Literal +from pydantic import ConfigDict, model_validator, Field +from swarmauri_core.image_gens.IGenImage import IGenImage +from swarmauri_core.ComponentBase import ComponentBase, ResourceTypes + + +class ImageGenBase(IGenImage, ComponentBase): + allowed_models: List[str] = [] + resource: Optional[str] = Field(default=ResourceTypes.IMAGE_GEN.value, frozen=True) + model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True) + type: Literal["LLMBase"] = "LLMBase" + + @model_validator(mode="after") + @classmethod + def _validate_name_in_allowed_models(cls, values): + name = values.name + allowed_models = values.allowed_models + if name and name not in allowed_models: + raise ValueError( + f"Model name {name} is not allowed. Choose from {allowed_models}" + ) + return values + + @abstractmethod + def generate_image(self, *args, **kwargs) -> any: + """ + Generate images based on the input data provided to the model. + """ + raise NotImplementedError("generate_image() not implemented in subclass yet.") + + @abstractmethod + async def agenerate_image(self, *args, **kwargs) -> any: + """ + Generate images based on the input data provided to the model. + """ + raise NotImplementedError("agenerate_image() not implemented in subclass yet.") + + @abstractmethod + def batch_generate(self, *args, **kwargs) -> any: + """ + Generate images based on the input data provided to the model. + """ + raise NotImplementedError("batch_generate() not implemented in subclass yet.") + + @abstractmethod + async def abatch_generate(self, *args, **kwargs) -> any: + """ + Generate images based on the input data provided to the model. + """ + raise NotImplementedError("abatch_generate() not implemented in subclass yet.") diff --git a/pkgs/swarmauri/swarmauri/image_gens/base/__init__.py b/pkgs/swarmauri/swarmauri/image_gens/base/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py new file mode 100644 index 000000000..c697764ff --- /dev/null +++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py @@ -0,0 +1,259 @@ +import httpx +import time +from typing import List, Literal, Optional, Dict, ClassVar +from pydantic import PrivateAttr +from swarmauri.utils.retry_decorator import retry_on_status_codes +from swarmauri.image_gens.base.ImageGenBase import ImageGenBase +import asyncio +import contextlib + + +class BlackForestImgGenModel(ImageGenBase): + """ + A model for generating images using FluxPro's image generation models through the Black Forest API. 
diff --git a/pkgs/swarmauri/swarmauri/image_gens/base/__init__.py b/pkgs/swarmauri/swarmauri/image_gens/base/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py
new file mode 100644
index 000000000..c697764ff
--- /dev/null
+++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py
@@ -0,0 +1,260 @@
+import httpx
+import time
+from typing import List, Literal, Optional, Dict, ClassVar
+from pydantic import PrivateAttr
+from swarmauri.utils.retry_decorator import retry_on_status_codes
+from swarmauri.image_gens.base.ImageGenBase import ImageGenBase
+import asyncio
+import contextlib
+
+
+class BlackForestImgGenModel(ImageGenBase):
+    """
+    A model for generating images with FluxPro models through the Black Forest API.
+    Link to API key: https://api.bfl.ml/auth/profile
+    """
+
+    _BASE_URL: str = PrivateAttr("https://api.bfl.ml")
+    _client: httpx.Client = PrivateAttr()
+    _async_client: httpx.AsyncClient = PrivateAttr(default=None)
+    _headers: Dict[str, str] = PrivateAttr(default=None)
+
+    api_key: str
+    allowed_models: List[str] = ["flux-pro-1.1", "flux-pro", "flux-dev"]
+
+    asyncio: ClassVar = asyncio
+    name: str = "flux-pro"  # Default model
+    type: Literal["BlackForestImgGenModel"] = "BlackForestImgGenModel"
+
+    def __init__(self, **data):
+        """
+        Initializes the BlackForestImgGenModel instance with HTTP clients.
+        """
+        super().__init__(**data)
+        self._headers = {
+            "Content-Type": "application/json",
+            "X-Key": self.api_key,
+        }
+        self._client = httpx.Client(headers=self._headers, timeout=30)
+
+    async def _get_async_client(self) -> httpx.AsyncClient:
+        """Gets or creates an async client instance."""
+        if self._async_client is None or self._async_client.is_closed:
+            self._async_client = httpx.AsyncClient(headers=self._headers, timeout=30)
+        return self._async_client
+
+    async def _close_async_client(self):
+        """Closes the async client if it exists and is open."""
+        if self._async_client is not None and not self._async_client.is_closed:
+            await self._async_client.aclose()
+            self._async_client = None
+
+    @retry_on_status_codes((429, 529), max_retries=1)
+    def _send_request(self, endpoint: str, data: dict) -> dict:
+        """Send a synchronous request to FluxPro's API for image generation."""
+        url = f"{self._BASE_URL}/{endpoint}"
+        response = self._client.post(url, json=data)
+        response.raise_for_status()
+        return response.json()
+
+    @retry_on_status_codes((429, 529), max_retries=1)
+    async def _async_send_request(self, endpoint: str, data: dict) -> dict:
+        """Send an asynchronous request to FluxPro's API for image generation."""
+        client = await self._get_async_client()
+        url = f"{self._BASE_URL}/{endpoint}"
+        response = await client.post(url, json=data)
+        response.raise_for_status()
+        return response.json()
+
+    @retry_on_status_codes((429, 529), max_retries=1)
+    def _get_result(self, task_id: str) -> dict:
+        """Get the result of a generation task synchronously."""
+        url = f"{self._BASE_URL}/v1/get_result"
+        params = {"id": task_id}
+        response = self._client.get(url, params=params)
+        response.raise_for_status()
+        return response.json()
+
+    @retry_on_status_codes((429, 529), max_retries=1)
+    async def _async_get_result(self, task_id: str) -> dict:
+        """Get the result of a generation task asynchronously."""
+        client = await self._get_async_client()
+        url = f"{self._BASE_URL}/v1/get_result"
+        params = {"id": task_id}
+        response = await client.get(url, params=params)
+        response.raise_for_status()
+        return response.json()
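The generation endpoints are asynchronous on the service side: a POST returns a task id, and the client polls /v1/get_result until the task resolves. A standalone sketch of that protocol, assuming the request and response shapes used in this file (the API key is a placeholder, and the exact payload fields each endpoint accepts may differ):

```python
import time

import httpx

API_KEY = "bfl-..."  # placeholder
headers = {"Content-Type": "application/json", "X-Key": API_KEY}

with httpx.Client(headers=headers, timeout=30) as client:
    # 1. Submit the generation task; the service replies with a task id.
    task = client.post(
        "https://api.bfl.ml/v1/flux-pro",
        json={"prompt": "A lighthouse at dawn", "width": 1024, "height": 768},
    ).json()

    # 2. Poll until the task leaves the queue.
    while True:
        result = client.get(
            "https://api.bfl.ml/v1/get_result", params={"id": task["id"]}
        ).json()
        if result["status"] == "Ready":
            print(result["result"]["sample"])  # URL of the generated image
            break
        if result["status"] in ("Error", "Request Moderated", "Content Moderated"):
            raise RuntimeError(f"Task failed with status: {result['status']}")
        time.sleep(10)
```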
+    def generate_image(
+        self,
+        prompt: str,
+        width: int = 1024,
+        height: int = 768,
+        steps: Optional[int] = None,
+        prompt_upsampling: bool = False,
+        seed: Optional[int] = None,
+        guidance: Optional[float] = None,
+        safety_tolerance: Optional[int] = None,
+        interval: Optional[float] = None,
+        max_wait_time: int = 300,
+        check_interval: int = 10,
+    ) -> str:
+        """
+        Generates an image based on the prompt and waits for the result synchronously.
+
+        Args:
+            prompt (str): The text prompt for image generation
+            width (int): Image width in pixels
+            height (int): Image height in pixels
+            steps (Optional[int]): Number of inference steps
+            prompt_upsampling (bool): Whether to use prompt upsampling
+            seed (Optional[int]): Random seed for generation
+            guidance (Optional[float]): Guidance scale
+            safety_tolerance (Optional[int]): Safety tolerance level
+            interval (Optional[float]): Interval parameter (flux-pro only)
+            max_wait_time (int): Maximum time to wait for result in seconds
+            check_interval (int): Time between status checks in seconds
+
+        Returns:
+            str: URL of the generated image sample
+        """
+        endpoint = f"v1/{self.name}"
+        data = {
+            "prompt": prompt,
+            "width": width,
+            "height": height,
+            "prompt_upsampling": prompt_upsampling,
+        }
+
+        if steps is not None:
+            data["steps"] = steps
+        if seed is not None:
+            data["seed"] = seed
+        if guidance is not None:
+            data["guidance"] = guidance
+        if safety_tolerance is not None:
+            data["safety_tolerance"] = safety_tolerance
+        if interval is not None and self.name == "flux-pro":
+            data["interval"] = interval
+
+        response = self._send_request(endpoint, data)
+        task_id = response["id"]
+
+        start_time = time.time()
+        while time.time() - start_time < max_wait_time:
+            result = self._get_result(task_id)
+            if result["status"] == "Ready":
+                return result["result"]["sample"]
+            elif result["status"] in [
+                "Error",
+                "Request Moderated",
+                "Content Moderated",
+            ]:
+                raise Exception(f"Task failed with status: {result['status']}")
+            time.sleep(check_interval)
+
+        raise TimeoutError(f"Image generation timed out after {max_wait_time} seconds")
+
+    async def agenerate_image(self, prompt: str, **kwargs) -> str:
+        """
+        Asynchronously generates an image based on the prompt and waits for the result.
+
+        Args:
+            prompt (str): The text prompt for image generation
+            **kwargs: Additional arguments passed to generate_image
+
+        Returns:
+            str: URL of the generated image sample
+        """
+        try:
+            endpoint = f"v1/{self.name}"
+            data = {
+                "prompt": prompt,
+                "width": kwargs.get("width", 1024),
+                "height": kwargs.get("height", 768),
+                "prompt_upsampling": kwargs.get("prompt_upsampling", False),
+            }
+
+            optional_params = [
+                "steps",
+                "seed",
+                "guidance",
+                "safety_tolerance",
+            ]
+            for param in optional_params:
+                if param in kwargs:
+                    data[param] = kwargs[param]
+
+            if "interval" in kwargs and self.name == "flux-pro":
+                data["interval"] = kwargs["interval"]
+
+            response = await self._async_send_request(endpoint, data)
+            task_id = response["id"]
+
+            max_wait_time = kwargs.get("max_wait_time", 300)
+            check_interval = kwargs.get("check_interval", 10)
+            start_time = time.time()
+
+            while time.time() - start_time < max_wait_time:
+                result = await self._async_get_result(task_id)
+                if result["status"] == "Ready":
+                    return result["result"]["sample"]
+                elif result["status"] in [
+                    "Error",
+                    "Request Moderated",
+                    "Content Moderated",
+                ]:
+                    raise Exception(f"Task failed with status: {result['status']}")
+                await asyncio.sleep(check_interval)
+
+            raise TimeoutError(
+                f"Image generation timed out after {max_wait_time} seconds"
+            )
+        finally:
+            await self._close_async_client()
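A usage sketch for the two single-prompt entry points, with a placeholder API key and arbitrary prompts; per the code above, both return the generated image's URL as a string:

```python
import asyncio

from swarmauri.image_gens.concrete.BlackForestImgGenModel import (
    BlackForestImgGenModel,
)

model = BlackForestImgGenModel(api_key="bfl-...")  # placeholder key

# Synchronous call: blocks until the task is "Ready" or times out.
url = model.generate_image(prompt="A lighthouse at dawn", width=1024, height=768)
print(url)

# Asynchronous call; extra keyword arguments flow through to the payload.
print(asyncio.run(model.agenerate_image(prompt="A foggy harbor", steps=40)))
```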
+    def batch_generate(self, prompts: List[str], **kwargs) -> List[str]:
+        """
+        Generates images for a batch of prompts synchronously.
+
+        Args:
+            prompts (List[str]): List of text prompts
+            **kwargs: Additional arguments passed to generate_image
+
+        Returns:
+            List[str]: List of generated image URLs
+        """
+        return [self.generate_image(prompt=prompt, **kwargs) for prompt in prompts]
+
+    async def abatch_generate(
+        self, prompts: List[str], max_concurrent: int = 5, **kwargs
+    ) -> List[str]:
+        """
+        Asynchronously generates images for a batch of prompts.
+
+        Args:
+            prompts (List[str]): List of text prompts
+            max_concurrent (int): Maximum number of concurrent tasks
+            **kwargs: Additional arguments passed to agenerate_image
+
+        Returns:
+            List[str]: List of generated image URLs
+        """
+        try:
+            semaphore = asyncio.Semaphore(max_concurrent)
+
+            async def process_prompt(prompt):
+                async with semaphore:
+                    return await self.agenerate_image(prompt=prompt, **kwargs)
+
+            tasks = [process_prompt(prompt) for prompt in prompts]
+            return await asyncio.gather(*tasks)
+        finally:
+            await self._close_async_client()
+
+    def __del__(self):
+        """Cleanup method to ensure clients are closed."""
+        self._client.close()
+        if self._async_client is not None and not self._async_client.is_closed:
+            with contextlib.suppress(Exception):
+                asyncio.run(self._close_async_client())
diff --git a/pkgs/swarmauri/swarmauri/image_gens/concrete/__init__.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/pkgs/swarmauri/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py b/pkgs/swarmauri/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py
new file mode 100644
index 000000000..5fbd06c8a
--- /dev/null
+++ b/pkgs/swarmauri/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py
@@ -0,0 +1,120 @@
+import pytest
+import os
+from dotenv import load_dotenv
+from swarmauri.image_gens.concrete.BlackForestImgGenModel import (
+    BlackForestImgGenModel,
+)
+
+from swarmauri.utils.timeout_wrapper import timeout
+
+load_dotenv()
+
+API_KEY = os.getenv("BLACKFOREST_API_KEY")
+
+
+@pytest.fixture(scope="module")
+def blackforest_imggen_model():
+    if not API_KEY:
+        pytest.skip("Skipping due to environment variable not set")
+    model = BlackForestImgGenModel(api_key=API_KEY)
+    return model
+
+
+def get_allowed_models():
+    if not API_KEY:
+        return []
+    model = BlackForestImgGenModel(api_key=API_KEY)
+    return model.allowed_models
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_model_resource(blackforest_imggen_model):
+    assert blackforest_imggen_model.resource == "ImageGen"
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_model_type(blackforest_imggen_model):
+    assert blackforest_imggen_model.type == "BlackForestImgGenModel"
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_serialization(blackforest_imggen_model):
+    assert (
+        blackforest_imggen_model.id
+        == BlackForestImgGenModel.model_validate_json(
+            blackforest_imggen_model.model_dump_json()
+        ).id
+    )
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_default_model_name(blackforest_imggen_model):
+    assert blackforest_imggen_model.name == "flux-pro"
+
+
+@timeout(5)
+@pytest.mark.parametrize("model_name", get_allowed_models())
+@pytest.mark.unit
+def test_generate_image(blackforest_imggen_model, model_name):
+    model = blackforest_imggen_model
+    model.name = model_name
+
+    prompt = "A cute dog playing in a park"
+    image_url = model.generate_image(prompt=prompt)
+
+    assert isinstance(image_url, str)
+    assert image_url.startswith("http")
+
+
+@timeout(5)
+@pytest.mark.asyncio
+@pytest.mark.parametrize("model_name", get_allowed_models())
+@pytest.mark.unit
+async
def test_agenerate_image(blackforest_imggen_model, model_name): + model = blackforest_imggen_model + model.name = model_name + + prompt = "A mountain with snow and a river" + image_url = await model.agenerate_image(prompt=prompt) + + assert isinstance(image_url, str) + assert image_url.startswith("http") + + +@timeout(5) +@pytest.mark.unit +def test_batch_generate(blackforest_imggen_model): + prompts = [ + "A futuristic city skyline", + "A tropical beach at sunset", + "A cup of coffee on a desk", + ] + + image_urls = blackforest_imggen_model.batch_generate(prompts=prompts) + + assert len(image_urls) == len(prompts) + for url in image_urls: + assert isinstance(url, str) + assert url.startswith("http") + + +@timeout(5) +@pytest.mark.asyncio +@pytest.mark.unit +async def test_abatch_generate(blackforest_imggen_model): + prompts = [ + "A space station in orbit", + "A lion resting in the savannah", + "A rainy day in a city", + ] + + image_urls = await blackforest_imggen_model.abatch_generate(prompts=prompts) + + assert len(image_urls) == len(prompts) + for url in image_urls: + assert isinstance(url, str) + assert url.startswith("http") From ad2049f212e8a8e989029426900ad56923a76e11 Mon Sep 17 00:00:00 2001 From: 3rdSon Date: Tue, 19 Nov 2024 15:53:42 +0100 Subject: [PATCH 02/42] swarm - implemented hyperbolic model --- .../llms/concrete/HyperbolicModel.py | 427 ++++++++++++++++++ .../unit/llms/HyperbolicModel_unit_test.py | 218 +++++++++ 2 files changed, 645 insertions(+) create mode 100644 pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicModel.py create mode 100644 pkgs/swarmauri/tests/unit/llms/HyperbolicModel_unit_test.py diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicModel.py b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicModel.py new file mode 100644 index 000000000..2b3677677 --- /dev/null +++ b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicModel.py @@ -0,0 +1,427 @@ +import asyncio +import json +from pydantic import PrivateAttr +import httpx +from swarmauri.utils.retry_decorator import retry_on_status_codes +from swarmauri.utils.duration_manager import DurationManager +from swarmauri.conversations.concrete.Conversation import Conversation +from typing import List, Optional, Dict, Literal, Any, AsyncGenerator, Generator + +from swarmauri_core.typing import SubclassUnion +from swarmauri.messages.base.MessageBase import MessageBase +from swarmauri.messages.concrete.AgentMessage import AgentMessage +from swarmauri.llms.base.LLMBase import LLMBase + +from swarmauri.messages.concrete.AgentMessage import UsageData + + +class HyperbolicModel(LLMBase): + """ + HyperbolicModel class for interacting with the Hyperbolic AI language models API. + + Attributes: + api_key (str): API key for authenticating requests to the Hyperbolic API. + allowed_models (List[str]): List of allowed model names that can be used. + name (str): The default model name to use for predictions. + type (Literal["HyperbolicModel"]): The type identifier for this class. 
+ + Link to Allowed Models: https://app.hyperbolic.xyz/models + Link to API KEYS: https://app.hyperbolic.xyz/settings + """ + + api_key: str + allowed_models: List[str] = [ + "Qwen/Qwen2.5-Coder-32B-Instruct", + "meta-llama/Llama-3.2-3B-Instruct", + "Qwen/Qwen2.5-72B-Instruct", + "deepseek-ai/DeepSeek-V2.5", + "meta-llama/Meta-Llama-3-70B-Instruct", + "NousResearch/Hermes-3-Llama-3.1-70B", + "meta-llama/Meta-Llama-3.1-70B-Instruct", + "meta-llama/Meta-Llama-3.1-8B-Instruct", + ] + name: str = "meta-llama/Meta-Llama-3.1-8B-Instruct" + type: Literal["HyperbolicModel"] = "HyperbolicModel" + _BASE_URL: str = PrivateAttr( + default="https://api.hyperbolic.xyz/v1/chat/completions" + ) + _headers: Dict[str, str] = PrivateAttr(default=None) + + def __init__(self, **data) -> None: + """ + Initialize the HyperbolicModel class with the provided data. + + Args: + **data: Arbitrary keyword arguments containing initialization data. + """ + super().__init__(**data) + self._headers = { + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + "Accept": "application/json", + } + + def _format_messages( + self, + messages: List[SubclassUnion[MessageBase]], + ) -> List[Dict[str, Any]]: + """ + Formats conversation messages into the structure expected by the API. + + Args: + messages (List[MessageBase]): List of message objects from the conversation history. + + Returns: + List[Dict[str, Any]]: List of formatted message dictionaries. + """ + formatted_messages = [] + for message in messages: + formatted_message = message.model_dump( + include=["content", "role", "name"], exclude_none=True + ) + + if isinstance(formatted_message["content"], list): + formatted_message["content"] = [ + {"type": item["type"], **item} + for item in formatted_message["content"] + ] + + formatted_messages.append(formatted_message) + return formatted_messages + + def _prepare_usage_data( + self, + usage_data, + prompt_time: float = 0.0, + completion_time: float = 0.0, + ) -> UsageData: + """ + Prepare usage data by combining token counts and timing information. + + Args: + usage_data: Raw usage data containing token counts. + prompt_time (float): Time taken for prompt processing. + completion_time (float): Time taken for response completion. + + Returns: + UsageData: Processed usage data. + """ + total_time = prompt_time + completion_time + + # Filter usage data for relevant keys + filtered_usage_data = { + key: value + for key, value in usage_data.items() + if key + not in { + "prompt_tokens", + "completion_tokens", + "total_tokens", + "prompt_time", + "completion_time", + "total_time", + } + } + + usage = UsageData( + prompt_tokens=usage_data.get("prompt_tokens", 0), + completion_tokens=usage_data.get("completion_tokens", 0), + total_tokens=usage_data.get("total_tokens", 0), + prompt_time=prompt_time, + completion_time=completion_time, + total_time=total_time, + **filtered_usage_data, + ) + + return usage + + @retry_on_status_codes((429, 529), max_retries=1) + def predict( + self, + conversation: Conversation, + temperature: float = 0.7, + max_tokens: Optional[int] = None, + top_p: float = 1.0, + top_k: int = -1, + enable_json: bool = False, + stop: Optional[List[str]] = None, + ) -> Conversation: + """ + Generates a response from the model based on the given conversation. + + Args: + conversation (Conversation): Conversation object with message history. + temperature (float): Sampling temperature for response diversity. + max_tokens (Optional[int]): Maximum tokens for the model's response. 
+            top_p (float): Cumulative probability for nucleus sampling.
+            top_k (int): Maximum number of tokens to consider at each step.
+            enable_json (bool): Whether to format the response as JSON.
+            stop (Optional[List[str]]): List of stop sequences for response termination.
+
+        Returns:
+            Conversation: Updated conversation with the model's response.
+        """
+        formatted_messages = self._format_messages(conversation.history)
+        payload = {
+            "model": self.name,
+            "messages": formatted_messages,
+            "temperature": temperature,
+            "top_p": top_p,
+            "top_k": top_k,
+            "stream": False,
+        }
+
+        if max_tokens is not None:
+            payload["max_tokens"] = max_tokens
+        if stop is not None:
+            payload["stop"] = stop
+
+        with DurationManager() as prompt_timer:
+            with httpx.Client(timeout=30) as client:
+                response = client.post(
+                    self._BASE_URL, headers=self._headers, json=payload
+                )
+                response.raise_for_status()
+
+        response_data = response.json()
+        message_content = response_data["choices"][0]["message"]["content"]
+        usage_data = response_data.get("usage", {})
+
+        usage = self._prepare_usage_data(usage_data, prompt_timer.duration)
+        conversation.add_message(AgentMessage(content=message_content, usage=usage))
+        return conversation
+
+    @retry_on_status_codes((429, 529), max_retries=1)
+    async def apredict(
+        self,
+        conversation: Conversation,
+        temperature: float = 0.7,
+        max_tokens: Optional[int] = None,
+        top_p: float = 1.0,
+        top_k: int = -1,
+        enable_json: bool = False,
+        stop: Optional[List[str]] = None,
+    ) -> Conversation:
+        """
+        Async method to generate a response from the model based on the given conversation.
+
+        Args are same as predict method.
+        """
+        formatted_messages = self._format_messages(conversation.history)
+        payload = {
+            "model": self.name,
+            "messages": formatted_messages,
+            "temperature": temperature,
+            "top_p": top_p,
+            "top_k": top_k,
+            "stream": False,
+        }
+
+        if max_tokens is not None:
+            payload["max_tokens"] = max_tokens
+        if stop is not None:
+            payload["stop"] = stop
+
+        with DurationManager() as prompt_timer:
+            async with httpx.AsyncClient(timeout=30) as client:
+                response = await client.post(
+                    self._BASE_URL, headers=self._headers, json=payload
+                )
+                response.raise_for_status()
+
+        response_data = response.json()
+        message_content = response_data["choices"][0]["message"]["content"]
+        usage_data = response_data.get("usage", {})
+
+        usage = self._prepare_usage_data(usage_data, prompt_timer.duration)
+        conversation.add_message(AgentMessage(content=message_content, usage=usage))
+        return conversation
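The streaming variants below parse server-sent events: each line carries a `data: ` prefix and a JSON chunk with an OpenAI-style `choices[0].delta`. A self-contained sketch of that framing, with fabricated sample lines:

```python
import json

sample_lines = [
    'data: {"choices": [{"delta": {"content": "Hel"}}]}',
    'data: {"choices": [{"delta": {"content": "lo"}}], "usage": null}',
    "data: [DONE]",  # sentinel line; json.loads fails and it is skipped
]

message, usage = "", {}
for line in sample_lines:
    payload = line.replace("data: ", "")
    try:
        chunk = json.loads(payload)
    except json.JSONDecodeError:
        continue  # skip keep-alives and the [DONE] sentinel
    if chunk["choices"] and chunk["choices"][0]["delta"]:
        # Some chunks (e.g. role-only deltas) carry no "content" key.
        message += chunk["choices"][0]["delta"].get("content", "")
    if chunk.get("usage"):
        usage = chunk["usage"]

print(message)  # -> "Hello"
```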
+ """ + formatted_messages = self._format_messages(conversation.history) + payload = { + "model": self.name, + "messages": formatted_messages, + "temperature": temperature, + "top_p": top_p, + "top_k": top_k, + "stream": True, + } + + if max_tokens is not None: + payload["max_tokens"] = max_tokens + if stop is not None: + payload["stop"] = stop + + with DurationManager() as promt_timer: + with httpx.Client(timeout=30) as client: + response = client.post( + self._BASE_URL, headers=self._headers, json=payload + ) + response.raise_for_status() + + message_content = "" + usage_data = {} + with DurationManager() as completion_timer: + for line in response.iter_lines(): + json_str = line.replace("data: ", "") + try: + if json_str: + chunk = json.loads(json_str) + if chunk["choices"] and chunk["choices"][0]["delta"]: + delta = chunk["choices"][0]["delta"]["content"] + message_content += delta + yield delta + if "usage" in chunk and chunk["usage"] is not None: + usage_data = chunk["usage"] + except json.JSONDecodeError: + pass + + usage = self._prepare_usage_data( + usage_data, promt_timer.duration, completion_timer.duration + ) + conversation.add_message(AgentMessage(content=message_content, usage=usage)) + + @retry_on_status_codes((429, 529), max_retries=1) + async def astream( + self, + conversation: Conversation, + temperature: float = 0.7, + max_tokens: Optional[int] = None, + top_p: float = 1.0, + top_k: int = -1, + enable_json: bool = False, + stop: Optional[List[str]] = None, + ) -> AsyncGenerator[str, None]: + """ + Async generator that streams response text from the model in real-time. + + Args are same as predict method. + + Yields: + str: Partial response content from the model. + """ + formatted_messages = self._format_messages(conversation.history) + payload = { + "model": self.name, + "messages": formatted_messages, + "temperature": temperature, + "top_p": top_p, + "top_k": top_k, + "stream": True, + } + + if max_tokens is not None: + payload["max_tokens"] = max_tokens + if stop is not None: + payload["stop"] = stop + + with DurationManager() as promt_timer: + async with httpx.AsyncClient(timeout=30) as client: + response = await client.post( + self._BASE_URL, headers=self._headers, json=payload + ) + response.raise_for_status() + + message_content = "" + usage_data = {} + with DurationManager() as completion_timer: + async for line in response.aiter_lines(): + json_str = line.replace("data: ", "") + try: + if json_str: + chunk = json.loads(json_str) + if chunk["choices"] and chunk["choices"][0]["delta"]: + delta = chunk["choices"][0]["delta"]["content"] + message_content += delta + yield delta + if "usage" in chunk and chunk["usage"] is not None: + usage_data = chunk["usage"] + except json.JSONDecodeError: + pass + + usage = self._prepare_usage_data( + usage_data, promt_timer.duration, completion_timer.duration + ) + conversation.add_message(AgentMessage(content=message_content, usage=usage)) + + def batch( + self, + conversations: List[Conversation], + temperature: float = 0.7, + max_tokens: Optional[int] = None, + top_p: float = 1.0, + top_k: int = -1, + enable_json: bool = False, + stop: Optional[List[str]] = None, + ) -> List[Conversation]: + """ + Processes a batch of conversations and generates responses for each sequentially. + + Args are same as predict method. 
+ """ + results = [] + for conversation in conversations: + result_conversation = self.predict( + conversation, + temperature=temperature, + max_tokens=max_tokens, + top_p=top_p, + top_k=top_k, + enable_json=enable_json, + stop=stop, + ) + results.append(result_conversation) + return results + + async def abatch( + self, + conversations: List[Conversation], + temperature: float = 0.7, + max_tokens: Optional[int] = None, + top_p: float = 1.0, + top_k: int = -1, + enable_json: bool = False, + stop: Optional[List[str]] = None, + max_concurrent=5, + ) -> List[Conversation]: + """ + Async method for processing a batch of conversations concurrently. + + Args are same as predict method, with additional arg: + max_concurrent (int): Maximum number of concurrent requests. + """ + semaphore = asyncio.Semaphore(max_concurrent) + + async def process_conversation(conv: Conversation) -> Conversation: + async with semaphore: + return await self.apredict( + conv, + temperature=temperature, + max_tokens=max_tokens, + top_p=top_p, + top_k=top_k, + enable_json=enable_json, + stop=stop, + ) + + tasks = [process_conversation(conv) for conv in conversations] + return await asyncio.gather(*tasks) diff --git a/pkgs/swarmauri/tests/unit/llms/HyperbolicModel_unit_test.py b/pkgs/swarmauri/tests/unit/llms/HyperbolicModel_unit_test.py new file mode 100644 index 000000000..6a4bd9652 --- /dev/null +++ b/pkgs/swarmauri/tests/unit/llms/HyperbolicModel_unit_test.py @@ -0,0 +1,218 @@ +import logging +import pytest +import os + +from swarmauri.llms.concrete.HyperbolicModel import HyperbolicModel as LLM +from swarmauri.conversations.concrete.Conversation import Conversation + +from swarmauri.messages.concrete.HumanMessage import HumanMessage +from swarmauri.messages.concrete.SystemMessage import SystemMessage + +from swarmauri.messages.concrete.AgentMessage import UsageData + +from swarmauri.utils.timeout_wrapper import timeout + +from dotenv import load_dotenv + +load_dotenv() + +API_KEY = os.getenv("HYPERBOLIC_API_KEY") + + +@pytest.fixture(scope="module") +def hyperbolic_model(): + if not API_KEY: + pytest.skip("Skipping due to environment variable not set") + llm = LLM(api_key=API_KEY) + return llm + + +def get_allowed_models(): + if not API_KEY: + return [] + llm = LLM(api_key=API_KEY) + return llm.allowed_models + + +@timeout(5) +@pytest.mark.unit +def test_ubc_resource(hyperbolic_model): + assert hyperbolic_model.resource == "LLM" + + +@timeout(5) +@pytest.mark.unit +def test_ubc_type(hyperbolic_model): + assert hyperbolic_model.type == "HyperbolicModel" + + +@timeout(5) +@pytest.mark.unit +def test_serialization(hyperbolic_model): + assert ( + hyperbolic_model.id + == LLM.model_validate_json(hyperbolic_model.model_dump_json()).id + ) + + +@timeout(5) +@pytest.mark.unit +def test_default_name(hyperbolic_model): + assert hyperbolic_model.name == "meta-llama/Meta-Llama-3.1-8B-Instruct" + + +@timeout(5) +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +def test_no_system_context(hyperbolic_model, model_name): + model = hyperbolic_model + model.name = model_name + conversation = Conversation() + + input_data = "Hello" + human_message = HumanMessage(content=input_data) + conversation.add_message(human_message) + + model.predict(conversation=conversation) + prediction = conversation.get_last().content + usage_data = conversation.get_last().usage + + logging.info(usage_data) + + assert type(prediction) is str + assert isinstance(usage_data, UsageData) + + +@timeout(5) 
+@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +def test_preamble_system_context(hyperbolic_model, model_name): + model = hyperbolic_model + model.name = model_name + conversation = Conversation() + + system_context = 'You only respond with the following phrase, "Jeff"' + human_message = SystemMessage(content=system_context) + conversation.add_message(human_message) + + input_data = "Hi" + human_message = HumanMessage(content=input_data) + conversation.add_message(human_message) + + model.predict(conversation=conversation) + prediction = conversation.get_last().content + usage_data = conversation.get_last().usage + + logging.info(usage_data) + + assert type(prediction) is str + assert "Jeff" in prediction + assert isinstance(usage_data, UsageData) + + +@timeout(5) +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +def test_stream(hyperbolic_model, model_name): + model = hyperbolic_model + model.name = model_name + conversation = Conversation() + + input_data = "Write a short story about a cat." + human_message = HumanMessage(content=input_data) + conversation.add_message(human_message) + + collected_tokens = [] + for token in model.stream(conversation=conversation): + logging.info(token) + assert isinstance(token, str) + collected_tokens.append(token) + + full_response = "".join(collected_tokens) + assert len(full_response) > 0 + assert conversation.get_last().content == full_response + assert isinstance(conversation.get_last().usage, UsageData) + + +@timeout(5) +@pytest.mark.asyncio(loop_scope="session") +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +async def test_apredict(hyperbolic_model, model_name): + model = hyperbolic_model + model.name = model_name + conversation = Conversation() + + input_data = "Hello" + human_message = HumanMessage(content=input_data) + conversation.add_message(human_message) + + result = await model.apredict(conversation=conversation) + prediction = result.get_last().content + assert isinstance(prediction, str) + assert isinstance(conversation.get_last().usage, UsageData) + + +@timeout(5) +@pytest.mark.asyncio(loop_scope="session") +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +async def test_astream(hyperbolic_model, model_name): + model = hyperbolic_model + model.name = model_name + conversation = Conversation() + + input_data = "Write a short story about a dog." 
+ human_message = HumanMessage(content=input_data) + conversation.add_message(human_message) + + collected_tokens = [] + async for token in model.astream(conversation=conversation): + assert isinstance(token, str) + collected_tokens.append(token) + + full_response = "".join(collected_tokens) + assert len(full_response) > 0 + assert conversation.get_last().content == full_response + assert isinstance(conversation.get_last().usage, UsageData) + + +@timeout(5) +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +def test_batch(hyperbolic_model, model_name): + model = hyperbolic_model + model.name = model_name + + conversations = [] + for prompt in ["Hello", "Hi there", "Good morning"]: + conv = Conversation() + conv.add_message(HumanMessage(content=prompt)) + conversations.append(conv) + + results = model.batch(conversations=conversations) + assert len(results) == len(conversations) + for result in results: + assert isinstance(result.get_last().content, str) + assert isinstance(result.get_last().usage, UsageData) + + +@timeout(5) +@pytest.mark.asyncio(loop_scope="session") +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +async def test_abatch(hyperbolic_model, model_name): + model = hyperbolic_model + model.name = model_name + + conversations = [] + for prompt in ["Hello", "Hi there", "Good morning"]: + conv = Conversation() + conv.add_message(HumanMessage(content=prompt)) + conversations.append(conv) + + results = await model.abatch(conversations=conversations) + assert len(results) == len(conversations) + for result in results: + assert isinstance(result.get_last().content, str) + assert isinstance(result.get_last().usage, UsageData) From a3058b6f18cf289aa49858fd1770c59d7085672d Mon Sep 17 00:00:00 2001 From: 3rdSon Date: Wed, 20 Nov 2024 09:18:40 +0100 Subject: [PATCH 03/42] swarm - implemented hyperbolicImgGen --- .../llms/concrete/HyperbolicImgGenModel.py | 210 ++++++++++++++++++ .../llms/HyperbolicImgGenModel_unit_test.py | 118 ++++++++++ 2 files changed, 328 insertions(+) create mode 100644 pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicImgGenModel.py create mode 100644 pkgs/swarmauri/tests/unit/llms/HyperbolicImgGenModel_unit_test.py diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicImgGenModel.py b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicImgGenModel.py new file mode 100644 index 000000000..72d6e8267 --- /dev/null +++ b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicImgGenModel.py @@ -0,0 +1,210 @@ +import httpx +from typing import List, Literal, Optional +from pydantic import PrivateAttr +from swarmauri.utils.retry_decorator import retry_on_status_codes +from swarmauri.llms.base.LLMBase import LLMBase +import asyncio +import contextlib + + +class HyperbolicImgGenModel(LLMBase): + """ + A model class for generating images from text prompts using Hyperbolic's image generation API. + + Attributes: + api_key (str): The API key for authenticating with the Hyperbolic API. + allowed_models (List[str]): A list of available models for image generation. + asyncio (ClassVar): The asyncio module for handling asynchronous operations. + name (str): The name of the model to be used for image generation. + type (Literal["HyperbolicImgGenModel"]): The type identifier for the model class. + height (int): Height of the generated image. + width (int): Width of the generated image. + steps (int): Number of inference steps. + cfg_scale (float): Classifier-free guidance scale. 
+ enable_refiner (bool): Whether to enable the refiner model. + backend (str): Computational backend for the model. + + Link to Allowed Models: https://app.hyperbolic.xyz/models + Link to API KEYS: https://app.hyperbolic.xyz/settings + """ + + _BASE_URL: str = PrivateAttr("https://api.hyperbolic.xyz/v1/image/generation") + _client: httpx.Client = PrivateAttr() + _async_client: httpx.AsyncClient = PrivateAttr(default=None) + + api_key: str + allowed_models: List[str] = [ + "SDXL1.0-base", + "SD1.5", + "SSD", + "SD2", + "SDXL-turbo", + ] + + name: str = "SDXL1.0-base" # Default model + type: Literal["HyperbolicImgGenModel"] = "HyperbolicImgGenModel" + + # Additional configuration parameters + height: int = 1024 + width: int = 1024 + steps: int = 30 + cfg_scale: float = 5.0 + enable_refiner: bool = False + backend: str = "auto" + + def __init__(self, **data): + """ + Initializes the HyperbolicImgGenModel instance. + + This constructor sets up HTTP clients for both synchronous and asynchronous + operations and configures request headers with the provided API key. + + Args: + **data: Keyword arguments for model initialization. + """ + super().__init__(**data) + self._headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.api_key}", + } + self._client = httpx.Client(headers=self._headers, timeout=30) + + async def _get_async_client(self) -> httpx.AsyncClient: + """ + Gets or creates an async client instance. + """ + if self._async_client is None or self._async_client.is_closed: + self._async_client = httpx.AsyncClient(headers=self._headers, timeout=30) + return self._async_client + + async def _close_async_client(self): + """ + Closes the async client if it exists and is open. + """ + if self._async_client is not None and not self._async_client.is_closed: + await self._async_client.aclose() + self._async_client = None + + def _create_request_payload(self, prompt: str) -> dict: + """ + Creates the payload for the image generation request. + """ + return { + "model_name": self.name, + "prompt": prompt, + "height": self.height, + "width": self.width, + "steps": self.steps, + "cfg_scale": self.cfg_scale, + "enable_refiner": self.enable_refiner, + "backend": self.backend, + } + + @retry_on_status_codes((429, 529), max_retries=1) + def _send_request(self, prompt: str) -> dict: + """ + Sends a synchronous request to the Hyperbolic API for image generation. + + Args: + prompt (str): The text prompt used for generating the image. + + Returns: + dict: The response data from the API. + """ + payload = self._create_request_payload(prompt) + response = self._client.post(self._BASE_URL, json=payload) + response.raise_for_status() + return response.json() + + @retry_on_status_codes((429, 529), max_retries=1) + async def _async_send_request(self, prompt: str) -> dict: + """ + Sends an asynchronous request to the Hyperbolic API for image generation. + + Args: + prompt (str): The text prompt used for generating the image. + + Returns: + dict: The response data from the API. + """ + client = await self._get_async_client() + payload = self._create_request_payload(prompt) + response = await client.post(self._BASE_URL, json=payload) + response.raise_for_status() + return response.json() + + def generate_image_base64(self, prompt: str) -> str: + """ + Generates an image synchronously based on the provided prompt and returns it as a base64-encoded string. + + Args: + prompt (str): The text prompt used for generating the image. 
+ + Returns: + str: The base64-encoded representation of the generated image. + """ + response_data = self._send_request(prompt) + return response_data["images"][0]["image"] + + async def agenerate_image_base64(self, prompt: str) -> str: + """ + Generates an image asynchronously based on the provided prompt and returns it as a base64-encoded string. + + Args: + prompt (str): The text prompt used for generating the image. + + Returns: + str: The base64-encoded representation of the generated image. + """ + try: + response_data = await self._async_send_request(prompt) + return response_data["images"][0]["image"] + finally: + await self._close_async_client() + + def batch_base64(self, prompts: List[str]) -> List[str]: + """ + Generates images for a batch of prompts synchronously and returns them as a list of base64-encoded strings. + + Args: + prompts (List[str]): A list of text prompts for image generation. + + Returns: + List[str]: A list of base64-encoded representations of the generated images. + """ + return [self.generate_image_base64(prompt) for prompt in prompts] + + async def abatch_base64( + self, prompts: List[str], max_concurrent: int = 5 + ) -> List[str]: + """ + Generates images for a batch of prompts asynchronously and returns them as a list of base64-encoded strings. + + Args: + prompts (List[str]): A list of text prompts for image generation. + max_concurrent (int): The maximum number of concurrent tasks. + + Returns: + List[str]: A list of base64-encoded representations of the generated images. + """ + try: + semaphore = asyncio.Semaphore(max_concurrent) + + async def process_prompt(prompt): + async with semaphore: + response_data = await self._async_send_request(prompt) + return response_data["images"][0]["image"] + + tasks = [process_prompt(prompt) for prompt in prompts] + return await asyncio.gather(*tasks) + finally: + await self._close_async_client() + + def __del__(self): + """ + Cleanup method to ensure clients are closed. 
+ """ + self._client.close() + if self._async_client is not None and not self._async_client.is_closed: + with contextlib.suppress(Exception): + asyncio.run(self._close_async_client()) diff --git a/pkgs/swarmauri/tests/unit/llms/HyperbolicImgGenModel_unit_test.py b/pkgs/swarmauri/tests/unit/llms/HyperbolicImgGenModel_unit_test.py new file mode 100644 index 000000000..1ad1b8d16 --- /dev/null +++ b/pkgs/swarmauri/tests/unit/llms/HyperbolicImgGenModel_unit_test.py @@ -0,0 +1,118 @@ +import pytest +import os +from swarmauri.llms.concrete.HyperbolicImgGenModel import HyperbolicImgGenModel +from dotenv import load_dotenv + +from swarmauri.utils.timeout_wrapper import timeout + +load_dotenv() + +API_KEY = os.getenv("HYPERBOLIC_API_KEY") + + +@pytest.fixture(scope="module") +def hyperbolic_imggen_model(): + if not API_KEY: + pytest.skip("Skipping due to environment variable not set") + model = HyperbolicImgGenModel(api_key=API_KEY) + return model + + +def get_allowed_models(): + if not API_KEY: + return [] + model = HyperbolicImgGenModel(api_key=API_KEY) + return model.allowed_models + + +@timeout(5) +@pytest.mark.unit +def test_ubc_resource(hyperbolic_imggen_model): + assert hyperbolic_imggen_model.resource == "LLM" + + +@timeout(5) +@pytest.mark.unit +def test_ubc_type(hyperbolic_imggen_model): + assert hyperbolic_imggen_model.type == "HyperbolicImgGenModel" + + +@timeout(5) +@pytest.mark.unit +def test_serialization(hyperbolic_imggen_model): + assert ( + hyperbolic_imggen_model.id + == HyperbolicImgGenModel.model_validate_json( + hyperbolic_imggen_model.model_dump_json() + ).id + ) + + +@timeout(5) +@pytest.mark.unit +def test_default_name(hyperbolic_imggen_model): + assert hyperbolic_imggen_model.name == "SDXL1.0-base" + + +@timeout(5) +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +def test_generate_image_base64(hyperbolic_imggen_model, model_name): + model = hyperbolic_imggen_model + model.name = model_name + + prompt = "A cute cat playing with a ball of yarn" + + image_base64 = model.generate_image_base64(prompt=prompt) + + assert isinstance(image_base64, str) + assert len(image_base64) > 0 + + +@timeout(5) +@pytest.mark.asyncio +@pytest.mark.parametrize("model_name", get_allowed_models()) +@pytest.mark.unit +async def test_agenerate_image_base64(hyperbolic_imggen_model, model_name): + model = hyperbolic_imggen_model + model.name = model_name + + prompt = "A serene landscape with mountains and a lake" + + image_base64 = await model.agenerate_image_base64(prompt=prompt) + + assert isinstance(image_base64, str) + assert len(image_base64) > 0 + + +@timeout(5) +@pytest.mark.unit +def test_batch_base64(hyperbolic_imggen_model): + prompts = [ + "A futuristic city skyline", + "A tropical beach at sunset", + ] + + result_base64_images = hyperbolic_imggen_model.batch_base64(prompts=prompts) + + assert len(result_base64_images) == len(prompts) + for image_base64 in result_base64_images: + assert isinstance(image_base64, str) + assert len(image_base64) > 0 + + +@timeout(5) +@pytest.mark.asyncio +@pytest.mark.unit +async def test_abatch_base64(hyperbolic_imggen_model): + prompts = [ + "An abstract painting with vibrant colors", + "A snowy mountain peak", + ] + + result_base64_images = await hyperbolic_imggen_model.abatch_base64(prompts=prompts) + + assert len(result_base64_images) == len(prompts) + for image_base64 in result_base64_images: + assert isinstance(image_base64, str) + assert len(image_base64) > 0 From c342182644381e3f6a2bc3301a6ad88cbdd705b4 Mon Sep 17 
00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:53:41 +0100 Subject: [PATCH 04/42] comm - modules transfer --- pkgs/community/pyproject.toml | 2 +- .../embeddings/__init__.py | 0 .../embeddings/base/__init__.py | 0 .../embeddings/concrete/Doc2VecEmbedding.py | 0 .../embeddings/concrete/__init__.py | 0 .../concrete/EntityRecognitionParser.py | 0 .../parsers/concrete/TextBlobNounParser.py | 0 .../concrete/TextBlobSentenceParser.py | 0 .../tools/concrete/TextLengthTool.py | 0 .../vector_stores}/Doc2VecVectorStore.py | 3 +-- pkgs/swarmauri/pyproject.toml | 21 ++++++++++--------- .../embeddings/Doc2VecEmbedding_unit_test.py | 2 +- .../parsers/TextBlobNounParser_unit_test.py | 2 +- .../TextBlobSentenceParser_unit_test.py | 2 +- .../tests/unit/tools/TextLength_unit_test.py | 2 +- .../Doc2VecVectorStore_unit_test.py | 2 +- 16 files changed, 18 insertions(+), 18 deletions(-) create mode 100644 pkgs/community/swarmauri_community/embeddings/__init__.py create mode 100644 pkgs/community/swarmauri_community/embeddings/base/__init__.py rename pkgs/{swarmauri/swarmauri => community/swarmauri_community}/embeddings/concrete/Doc2VecEmbedding.py (100%) create mode 100644 pkgs/community/swarmauri_community/embeddings/concrete/__init__.py rename pkgs/{swarmauri/swarmauri => community/swarmauri_community}/parsers/concrete/EntityRecognitionParser.py (100%) rename pkgs/{swarmauri/swarmauri => community/swarmauri_community}/parsers/concrete/TextBlobNounParser.py (100%) rename pkgs/{swarmauri/swarmauri => community/swarmauri_community}/parsers/concrete/TextBlobSentenceParser.py (100%) rename pkgs/{swarmauri/swarmauri => community/swarmauri_community}/tools/concrete/TextLengthTool.py (100%) rename pkgs/{swarmauri/swarmauri/vector_stores/concrete => community/swarmauri_community/vector_stores}/Doc2VecVectorStore.py (96%) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index a82494195..b9b8d8665 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -32,7 +32,7 @@ pygithub = "*" python-dotenv = "*" qrcode = "*" redis = "^4.0" -scikit-learn="^1.4.2" +#scikit-learn="^1.4.2" swarmauri = "==0.5.2" textstat = "*" transformers = ">=4.45.0" diff --git a/pkgs/community/swarmauri_community/embeddings/__init__.py b/pkgs/community/swarmauri_community/embeddings/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/community/swarmauri_community/embeddings/base/__init__.py b/pkgs/community/swarmauri_community/embeddings/base/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/swarmauri/swarmauri/embeddings/concrete/Doc2VecEmbedding.py b/pkgs/community/swarmauri_community/embeddings/concrete/Doc2VecEmbedding.py similarity index 100% rename from pkgs/swarmauri/swarmauri/embeddings/concrete/Doc2VecEmbedding.py rename to pkgs/community/swarmauri_community/embeddings/concrete/Doc2VecEmbedding.py diff --git a/pkgs/community/swarmauri_community/embeddings/concrete/__init__.py b/pkgs/community/swarmauri_community/embeddings/concrete/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/swarmauri/swarmauri/parsers/concrete/EntityRecognitionParser.py b/pkgs/community/swarmauri_community/parsers/concrete/EntityRecognitionParser.py similarity index 100% rename from pkgs/swarmauri/swarmauri/parsers/concrete/EntityRecognitionParser.py rename to pkgs/community/swarmauri_community/parsers/concrete/EntityRecognitionParser.py diff --git 
a/pkgs/swarmauri/swarmauri/parsers/concrete/TextBlobNounParser.py b/pkgs/community/swarmauri_community/parsers/concrete/TextBlobNounParser.py similarity index 100% rename from pkgs/swarmauri/swarmauri/parsers/concrete/TextBlobNounParser.py rename to pkgs/community/swarmauri_community/parsers/concrete/TextBlobNounParser.py diff --git a/pkgs/swarmauri/swarmauri/parsers/concrete/TextBlobSentenceParser.py b/pkgs/community/swarmauri_community/parsers/concrete/TextBlobSentenceParser.py similarity index 100% rename from pkgs/swarmauri/swarmauri/parsers/concrete/TextBlobSentenceParser.py rename to pkgs/community/swarmauri_community/parsers/concrete/TextBlobSentenceParser.py diff --git a/pkgs/swarmauri/swarmauri/tools/concrete/TextLengthTool.py b/pkgs/community/swarmauri_community/tools/concrete/TextLengthTool.py similarity index 100% rename from pkgs/swarmauri/swarmauri/tools/concrete/TextLengthTool.py rename to pkgs/community/swarmauri_community/tools/concrete/TextLengthTool.py diff --git a/pkgs/swarmauri/swarmauri/vector_stores/concrete/Doc2VecVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/Doc2VecVectorStore.py similarity index 96% rename from pkgs/swarmauri/swarmauri/vector_stores/concrete/Doc2VecVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/Doc2VecVectorStore.py index cc4bace96..8aca2ee07 100644 --- a/pkgs/swarmauri/swarmauri/vector_stores/concrete/Doc2VecVectorStore.py +++ b/pkgs/community/swarmauri_community/vector_stores/Doc2VecVectorStore.py @@ -1,8 +1,7 @@ from typing import List, Union, Literal -from pydantic import PrivateAttr from swarmauri.documents.concrete.Document import Document -from swarmauri.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding +from swarmauri_community.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding from swarmauri.distances.concrete.CosineDistance import CosineDistance from swarmauri.vector_stores.base.VectorStoreBase import VectorStoreBase from swarmauri.vector_stores.base.VectorStoreRetrieveMixin import ( diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index 7ba365c89..fb20c59d9 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -34,14 +34,14 @@ aiohttp = { version = "^3.10.10", optional = true } #fal-client = { version = ">=0.5.0", optional = true } #google-generativeai = { version = "^0.8.3", optional = true } #openai = { version = "^1.52.0", optional = true } -nltk = { version = "^3.9.1", optional = true } -textblob = { version = "^0.18.0", optional = true } +#nltk = { version = "^3.9.1", optional = true } +#textblob = { version = "^0.18.0", optional = true } yake = { version = "==0.4.8", optional = true } beautifulsoup4 = { version = "04.12.3", optional = true } -gensim = { version = "==4.3.3", optional = true } +#gensim = { version = "==4.3.3", optional = true } scipy = { version = ">=1.7.0,<1.14.0", optional = true } -scikit-learn = { version = "^1.4.2", optional = true } -spacy = { version = ">=3.0.0,<=3.8.2", optional = true } +#scikit-learn = { version = "^1.4.2", optional = true } +#spacy = { version = ">=3.0.0,<=3.8.2", optional = true } transformers = { version = "^4.45.0", optional = true } torch = { version = "^2.5.0", optional = true } keras = { version = ">=3.2.0", optional = true } @@ -54,8 +54,8 @@ io = ["aiofiles", "aiohttp"] #llms = ["cohere", "mistralai", "fal-client", "google-generativeai", "openai"] nlp = ["nltk", "textblob", "yake"] nlp_tools = ["beautifulsoup4"] -ml_toolkits = ["gensim", "scipy", 
"scikit-learn"] -spacy = ["spacy"] +#ml_toolkits = ["gensim", "scipy", "scikit-learn"] +#spacy = ["spacy"] transformers = ["transformers"] torch = ["torch"] tensorflow = ["keras", "tf-keras"] @@ -65,10 +65,11 @@ visualization = ["matplotlib"] full = [ "aiofiles", "aiohttp", #"cohere", "mistralai", "fal-client", "google-generativeai", "openai", - "nltk", "textblob", "yake", + #"nltk", "textblob", + "yake", "beautifulsoup4", - "gensim", "scipy", "scikit-learn", - "spacy", + #"gensim", "scipy", "scikit-learn", + #"spacy", "transformers", "torch", "keras", "tf-keras", diff --git a/pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py b/pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py index c0dbb3a37..7f3afc447 100644 --- a/pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py +++ b/pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py @@ -1,5 +1,5 @@ import pytest -from swarmauri.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding +from swarmauri_community.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding @pytest.mark.unit def test_ubc_resource(): diff --git a/pkgs/swarmauri/tests/unit/parsers/TextBlobNounParser_unit_test.py b/pkgs/swarmauri/tests/unit/parsers/TextBlobNounParser_unit_test.py index 6aa6bec95..e5f8a550c 100644 --- a/pkgs/swarmauri/tests/unit/parsers/TextBlobNounParser_unit_test.py +++ b/pkgs/swarmauri/tests/unit/parsers/TextBlobNounParser_unit_test.py @@ -1,5 +1,5 @@ import pytest -from swarmauri.parsers.concrete.TextBlobNounParser import TextBlobNounParser as Parser +from swarmauri_community.parsers.concrete.TextBlobNounParser import TextBlobNounParser as Parser def setup_module(module): diff --git a/pkgs/swarmauri/tests/unit/parsers/TextBlobSentenceParser_unit_test.py b/pkgs/swarmauri/tests/unit/parsers/TextBlobSentenceParser_unit_test.py index a84023b2f..36c347906 100644 --- a/pkgs/swarmauri/tests/unit/parsers/TextBlobSentenceParser_unit_test.py +++ b/pkgs/swarmauri/tests/unit/parsers/TextBlobSentenceParser_unit_test.py @@ -1,5 +1,5 @@ import pytest -from swarmauri.parsers.concrete.TextBlobSentenceParser import TextBlobSentenceParser as Parser +from swarmauri_community.parsers.concrete.TextBlobSentenceParser import TextBlobSentenceParser as Parser @pytest.mark.unit def test_ubc_resource(): diff --git a/pkgs/swarmauri/tests/unit/tools/TextLength_unit_test.py b/pkgs/swarmauri/tests/unit/tools/TextLength_unit_test.py index 7bc0c94a1..72d3ff6bc 100644 --- a/pkgs/swarmauri/tests/unit/tools/TextLength_unit_test.py +++ b/pkgs/swarmauri/tests/unit/tools/TextLength_unit_test.py @@ -1,5 +1,5 @@ import pytest -from swarmauri.tools.concrete import TextLengthTool as Tool +from swarmauri_community.tools.concrete import TextLengthTool as Tool @pytest.mark.unit def test_ubc_resource(): diff --git a/pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py b/pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py index 7afcd7097..01c8eb634 100644 --- a/pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py +++ b/pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py @@ -1,6 +1,6 @@ import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri.vector_stores.concrete.Doc2VecVectorStore import Doc2VecVectorStore +from swarmauri_community.vector_stores.Doc2VecVectorStore import Doc2VecVectorStore @pytest.mark.unit From 99773087821cb24428da035617f751bf15a1f248 Mon Sep 17 00:00:00 2001 From: 3rdSon Date: Wed, 20 Nov 2024 11:14:50 
+0100
Subject: [PATCH 05/42] swarm - implemented hyperbolicaudio

---
 .../llms/concrete/HyperbolicAudioTTS.py       | 146 ++++++++++++++++++
 .../tests/static/hyperbolic_test2.mp3         | Bin 0 -> 12429 bytes
 .../tests/static/hyperbolic_test3.mp3         | Bin 0 -> 13731 bytes
 .../tests/static/hyperbolic_test_tts.mp3      | Bin 0 -> 10005 bytes
 .../unit/llms/HyperbolicAudioTTS_unit_test.py | 141 +++++++++++++++++
 5 files changed, 287 insertions(+)
 create mode 100644 pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicAudioTTS.py
 create mode 100644 pkgs/swarmauri/tests/static/hyperbolic_test2.mp3
 create mode 100644 pkgs/swarmauri/tests/static/hyperbolic_test3.mp3
 create mode 100644 pkgs/swarmauri/tests/static/hyperbolic_test_tts.mp3
 create mode 100644 pkgs/swarmauri/tests/unit/llms/HyperbolicAudioTTS_unit_test.py

diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicAudioTTS.py b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicAudioTTS.py
new file mode 100644
index 000000000..b6ff4de67
--- /dev/null
+++ b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicAudioTTS.py
@@ -0,0 +1,146 @@
+import base64
+import io
+import os
+from typing import AsyncIterator, Iterator, List, Literal, Dict, Optional
+import httpx
+from pydantic import PrivateAttr, model_validator, Field
+from swarmauri.utils.retry_decorator import retry_on_status_codes
+from swarmauri.llms.base.LLMBase import LLMBase
+import asyncio
+
+
+class HyperbolicAudioTTS(LLMBase):
+    """
+    A class to interact with Hyperbolic's Text-to-Speech API, allowing for synchronous
+    and asynchronous text-to-speech synthesis.
+
+    Attributes:
+        api_key (str): The API key for accessing Hyperbolic's TTS service.
+        language (Optional[str]): Language of the text.
+        speaker (Optional[str]): Specific speaker variant.
+        speed (Optional[float]): Speech speed control.
+
+    Provider Resource: https://api.hyperbolic.xyz/v1/audio/generation
+    Link to API KEYS: https://app.hyperbolic.xyz/settings
+    """
+
+    api_key: str
+
+    # Supported languages
+    allowed_languages: List[str] = ["EN", "ES", "FR", "ZH", "JP", "KR"]
+
+    # Supported speakers per language
+    allowed_speakers: Dict[str, List[str]] = {
+        "EN": ["EN-US", "EN-BR", "EN-INDIA", "EN-AU"],
+        "ES": ["ES"],
+        "FR": ["FR"],
+        "ZH": ["ZH"],
+        "JP": ["JP"],
+        "KR": ["KR"],
+    }
+
+    # Optional parameters with type hints and validation
+    language: Optional[str] = None
+    speaker: Optional[str] = None
+    speed: Optional[float] = Field(default=1.0, ge=0.1, le=5)
+
+    type: Literal["HyperbolicAudioTTS"] = "HyperbolicAudioTTS"
+    _BASE_URL: str = PrivateAttr(
+        default="https://api.hyperbolic.xyz/v1/audio/generation"
+    )
+    _headers: Dict[str, str] = PrivateAttr(default=None)
+
+    def __init__(self, **data):
+        """
+        Initialize the HyperbolicAudioTTS class with the provided data.
+        """
+        super().__init__(**data)
+        self._headers = {
+            "Authorization": f"Bearer {self.api_key}",
+            "Content-Type": "application/json",
+        }
+
+    def _prepare_payload(self, text: str) -> Dict:
+        """
+        Prepare the payload for the TTS request.
+        """
+        payload = {"text": text}
+
+        # Add optional parameters if they are set
+        if self.language:
+            payload["language"] = self.language
+        if self.speaker:
+            payload["speaker"] = self.speaker
+        if self.speed is not None:
+            payload["speed"] = self.speed
+
+        return payload
+
+    def predict(self, text: str, audio_path: str = "output.mp3") -> str:
+        """
+        Synchronously converts text to speech.
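+
+        Example (illustrative only; a valid Hyperbolic API key is required):
+
+            tts = HyperbolicAudioTTS(api_key="...")
+            tts.predict("Hello world", audio_path="hello.mp3")
+            # returns the absolute path of the written MP3 file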
+        """
+        payload = self._prepare_payload(text)
+
+        with httpx.Client(timeout=30) as client:
+            response = client.post(self._BASE_URL, headers=self._headers, json=payload)
+            response.raise_for_status()
+
+        # Decode base64 audio
+        audio_data = base64.b64decode(response.json()["audio"])
+
+        with open(audio_path, "wb") as audio_file:
+            audio_file.write(audio_data)
+
+        return os.path.abspath(audio_path)
+
+    async def apredict(self, text: str, audio_path: str = "output.mp3") -> str:
+        """
+        Asynchronously converts text to speech.
+        """
+        payload = self._prepare_payload(text)
+
+        async with httpx.AsyncClient(timeout=30) as client:
+            response = await client.post(
+                self._BASE_URL, headers=self._headers, json=payload
+            )
+            response.raise_for_status()
+
+        # Decode base64 audio
+        audio_data = base64.b64decode(response.json()["audio"])
+
+        with open(audio_path, "wb") as audio_file:
+            audio_file.write(audio_data)
+
+        return os.path.abspath(audio_path)
+
+    def batch(
+        self,
+        text_path_dict: Dict[str, str],
+    ) -> List[str]:
+        """
+        Synchronously process multiple text-to-speech requests in batch mode.
+        """
+        return [
+            self.predict(text=text, audio_path=path)
+            for text, path in text_path_dict.items()
+        ]
+
+    async def abatch(
+        self,
+        text_path_dict: Dict[str, str],
+        max_concurrent=5,
+    ) -> List[str]:
+        """
+        Asynchronously process multiple text-to-speech requests in batch mode.
+        """
+        semaphore = asyncio.Semaphore(max_concurrent)
+
+        async def process_conversation(text, path) -> str:
+            async with semaphore:
+                return await self.apredict(text=text, audio_path=path)
+
+        tasks = [
+            process_conversation(text, path) for text, path in text_path_dict.items()
+        ]
+        return await asyncio.gather(*tasks)
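A minimal usage sketch of the class added above (illustrative only: it assumes a
valid HYPERBOLIC_API_KEY in the environment, and the language/speaker values are
taken from the allowed_languages and allowed_speakers defaults in this patch):

    import os

    from swarmauri.llms.concrete.HyperbolicAudioTTS import HyperbolicAudioTTS

    tts = HyperbolicAudioTTS(
        api_key=os.environ["HYPERBOLIC_API_KEY"],  # assumed to be set
        language="EN",
        speaker="EN-US",
        speed=1.0,
    )

    # predict() writes one MP3 file and returns its absolute path
    print(tts.predict(text="Hello from Hyperbolic.", audio_path="hello.mp3"))

    # batch() maps each input text to its own output file
    paths = tts.batch(
        text_path_dict={
            "First sample.": "first.mp3",
            "Second sample.": "second.mp3",
        }
    )

The async variants behave the same way: apredict mirrors predict, and abatch fans
the same requests out under an asyncio.Semaphore capped by max_concurrent
(default 5).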
diff --git a/pkgs/swarmauri/tests/static/hyperbolic_test2.mp3 b/pkgs/swarmauri/tests/static/hyperbolic_test2.mp3
new file mode 100644
index 0000000000000000000000000000000000000000..1fabb16fa56f2691a3d9538e81d9d42b97a0ce53
GIT binary patch
literal 12429
[base85-encoded binary data omitted (12429-byte MP3 test fixture)]

literal 0
HcmV?d00001

diff --git a/pkgs/swarmauri/tests/static/hyperbolic_test3.mp3 b/pkgs/swarmauri/tests/static/hyperbolic_test3.mp3
new file mode 100644
index 0000000000000000000000000000000000000000..f8a7a0a8b5719ae56d3c9cd37bb57e1e0c528fda
GIT binary patch
literal 13731
[base85-encoded binary data omitted (13731-byte MP3 test fixture)]

literal 0
HcmV?d00001

diff --git a/pkgs/swarmauri/tests/static/hyperbolic_test_tts.mp3 b/pkgs/swarmauri/tests/static/hyperbolic_test_tts.mp3
new file mode 100644
index 0000000000000000000000000000000000000000..194714d1649c1c74f4cd59df9157095c5495a26c
GIT binary patch
literal 10005
[base85-encoded binary data omitted (10005-byte MP3 test fixture)]

literal 0
HcmV?d00001

diff --git a/pkgs/swarmauri/tests/unit/llms/HyperbolicAudioTTS_unit_test.py b/pkgs/swarmauri/tests/unit/llms/HyperbolicAudioTTS_unit_test.py
new file mode 100644
index 000000000..9b81c84ef
--- /dev/null
+++ b/pkgs/swarmauri/tests/unit/llms/HyperbolicAudioTTS_unit_test.py
@@ -0,0 +1,141 @@
+import logging
+import pytest
+import os
+
+from swarmauri.llms.concrete.HyperbolicAudioTTS import HyperbolicAudioTTS as LLM
+from dotenv import load_dotenv
+from swarmauri.utils.timeout_wrapper import timeout
+from pathlib import Path
+
+load_dotenv()
+
+API_KEY = os.getenv("HYPERBOLIC_API_KEY")
+
+
+# Resolve the tests root directory so the static file paths below work
+# regardless of the current working directory
+root_dir = Path(__file__).resolve().parents[2]
+
+# Construct file paths dynamically
+file_path = os.path.join(root_dir, "static", "hyperbolic_test_tts.mp3")
+file_path2 = os.path.join(root_dir, "static", "hyperbolic_test2.mp3")
+file_path3 = os.path.join(root_dir, "static", "hyperbolic_test3.mp3")
+
+
+@pytest.fixture(scope="module")
+def hyperbolic_model():
+    if not API_KEY:
+        pytest.skip("Skipping due to environment variable not set")
+    llm = LLM(api_key=API_KEY)
+    return llm
+
+
+@timeout(5)
+def get_allowed_languages():
+    if not API_KEY:
+        return []
+    llm = LLM(api_key=API_KEY)
+    return llm.allowed_languages
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_ubc_resource(hyperbolic_model):
+    assert hyperbolic_model.resource == "LLM"
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_ubc_type(hyperbolic_model):
+    assert hyperbolic_model.type == "HyperbolicAudioTTS"
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_serialization(hyperbolic_model):
+    assert (
+        hyperbolic_model.id
+        == LLM.model_validate_json(hyperbolic_model.model_dump_json()).id
+    )
+
+
+@timeout(5)
+@pytest.mark.unit
+def test_default_speed(hyperbolic_model):
+    assert hyperbolic_model.speed == 1.0
+
+
+@timeout(5)
+@pytest.mark.parametrize("language", get_allowed_languages())
+@pytest.mark.unit
+def test_predict(hyperbolic_model, language):
+    """
+    Test prediction with different languages
+    Note: Adjust the text according to the language if needed
+    """
+    # Set the language for the test
+    hyperbolic_model.language = language
+
+    # Select an appropriate text based on the language
+    texts = {
+        "EN": "Hello, this is a test of text-to-speech output in English.",
+        "ES": "Hola, esta es una prueba de salida de texto a voz en español.",
+        "FR": "Bonjour, ceci est un test de sortie de texte en français.",
+        "ZH": "这是一个中文语音转换测试。",
+        "JP": "これは日本語の音声合成テストです。",
+        "KR": "이것은 한국어 음성 합성 테스트입니다.",
+    }
+
+    text = texts.get(
+        language, "Hello, this is a generic test of text-to-speech output."
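+        # falls back to a generic English sentence for any language code
+        # that has no predefined sample in the texts mapping above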
+ ) + + audio_file_path = hyperbolic_model.predict(text=text, audio_path=file_path) + + logging.info(audio_file_path) + + assert isinstance(audio_file_path, str) + assert os.path.exists(audio_file_path) + assert os.path.getsize(audio_file_path) > 0 + + +@timeout(5) +@pytest.mark.unit +def test_batch(hyperbolic_model): + """ + Test batch processing of multiple texts + """ + text_path_dict = { + "Hello": file_path, + "Hi there": file_path2, + "Good morning": file_path3, + } + + results = hyperbolic_model.batch(text_path_dict=text_path_dict) + assert len(results) == len(text_path_dict) + + for result in results: + assert isinstance(result, str) + assert os.path.exists(result) + assert os.path.getsize(result) > 0 + + +@timeout(5) +@pytest.mark.asyncio(loop_scope="session") +@pytest.mark.unit +async def test_abatch(hyperbolic_model): + """ + Test asynchronous batch processing of multiple texts + """ + text_path_dict = { + "Hello": file_path, + "Hi there": file_path2, + "Good morning": file_path3, + } + + results = await hyperbolic_model.abatch(text_path_dict=text_path_dict) + assert len(results) == len(text_path_dict) + + for result in results: + assert isinstance(result, str) + assert os.path.exists(result) + assert os.path.getsize(result) > 0 From e2005640bfb576901eb802d15511fca97e74d354 Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Wed, 20 Nov 2024 12:46:21 +0100 Subject: [PATCH 06/42] comm - changed test files path --- .../embeddings/Doc2VecEmbedding_unit_test.py | 22 ++++ .../parsers/TextBlobNounParser_unit_test.py | 0 .../TextBlobSentenceParser_unit_test.py | 0 .../tests/unit/tools/TextLength_unit_test.py | 0 .../Doc2VecVectorStore_unit_test.py | 72 +++++++++++ .../swarmauri/parsers/concrete/__init__.py | 2 +- .../swarmauri/tools/base/ToolBase.py | 3 + .../swarmauri/tools/concrete/SMOGIndexTool.py | 113 ------------------ .../utils/extract_signature_decorator.py | 85 +++++++++++++ .../vector_stores/concrete/__init__.py | 2 +- 10 files changed, 184 insertions(+), 115 deletions(-) create mode 100644 pkgs/community/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py rename pkgs/{swarmauri => community}/tests/unit/parsers/TextBlobNounParser_unit_test.py (100%) rename pkgs/{swarmauri => community}/tests/unit/parsers/TextBlobSentenceParser_unit_test.py (100%) rename pkgs/{swarmauri => community}/tests/unit/tools/TextLength_unit_test.py (100%) create mode 100644 pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py delete mode 100644 pkgs/swarmauri/swarmauri/tools/concrete/SMOGIndexTool.py create mode 100644 pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py diff --git a/pkgs/community/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py b/pkgs/community/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py new file mode 100644 index 000000000..7f3afc447 --- /dev/null +++ b/pkgs/community/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py @@ -0,0 +1,22 @@ +import pytest +from swarmauri_community.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding + +@pytest.mark.unit +def test_ubc_resource(): + assert Doc2VecEmbedding().resource == 'Embedding' + +@pytest.mark.unit +def test_ubc_type(): + assert Doc2VecEmbedding().type == 'Doc2VecEmbedding' + +@pytest.mark.unit +def test_serialization(): + embedder = Doc2VecEmbedding() + assert embedder.id == Doc2VecEmbedding.model_validate_json(embedder.model_dump_json()).id + +@pytest.mark.unit +def test_fit_transform(): + embedder = Doc2VecEmbedding() + documents = 
['test', 'cat', 'banana'] + embedder.fit_transform(documents) + assert ['banana', 'cat', 'test'] == embedder.extract_features() \ No newline at end of file diff --git a/pkgs/swarmauri/tests/unit/parsers/TextBlobNounParser_unit_test.py b/pkgs/community/tests/unit/parsers/TextBlobNounParser_unit_test.py similarity index 100% rename from pkgs/swarmauri/tests/unit/parsers/TextBlobNounParser_unit_test.py rename to pkgs/community/tests/unit/parsers/TextBlobNounParser_unit_test.py diff --git a/pkgs/swarmauri/tests/unit/parsers/TextBlobSentenceParser_unit_test.py b/pkgs/community/tests/unit/parsers/TextBlobSentenceParser_unit_test.py similarity index 100% rename from pkgs/swarmauri/tests/unit/parsers/TextBlobSentenceParser_unit_test.py rename to pkgs/community/tests/unit/parsers/TextBlobSentenceParser_unit_test.py diff --git a/pkgs/swarmauri/tests/unit/tools/TextLength_unit_test.py b/pkgs/community/tests/unit/tools/TextLength_unit_test.py similarity index 100% rename from pkgs/swarmauri/tests/unit/tools/TextLength_unit_test.py rename to pkgs/community/tests/unit/tools/TextLength_unit_test.py diff --git a/pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py b/pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py new file mode 100644 index 000000000..01c8eb634 --- /dev/null +++ b/pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py @@ -0,0 +1,72 @@ +import pytest +from swarmauri.documents.concrete.Document import Document +from swarmauri_community.vector_stores.Doc2VecVectorStore import Doc2VecVectorStore + + +@pytest.mark.unit +def test_ubc_resource(): + vs = Doc2VecVectorStore() + assert vs.resource == "VectorStore" + assert vs.embedder.resource == "Embedding" + + +@pytest.mark.unit +def test_ubc_type(): + vs = Doc2VecVectorStore() + assert vs.type == "Doc2VecVectorStore" + + +@pytest.mark.unit +def test_serialization(): + vs = Doc2VecVectorStore() + assert vs.id == Doc2VecVectorStore.model_validate_json(vs.model_dump_json()).id + + +@pytest.mark.unit +def test_top_k(): + vs = Doc2VecVectorStore() + documents = [ + Document(content="test"), + Document(content="test1"), + Document(content="test2"), + Document(content="test3"), + ] + + vs.add_documents(documents) + assert len(vs.retrieve(query="test", top_k=2)) == 2 + + +@pytest.mark.unit +def test_adding_more_doc(): + vs = Doc2VecVectorStore() + documents_batch_1 = [ + Document(content="test"), + Document(content="test1"), + Document(content="test2"), + Document(content="test3"), + ] + documents_batch_2 = [ + Document(content="This is a test. Test number 4"), + Document(content="This is a test. Test number 5"), + Document(content="This is a test. Test number 6"), + Document(content="This is a test. 
Test number 7"), + ] + doc_count = len(documents_batch_1) + len(documents_batch_2) + + vs.add_documents(documents_batch_1) + vs.add_documents(documents_batch_2) + assert len(vs.retrieve(query="test", top_k=doc_count)) == doc_count + + +@pytest.mark.unit +def test_oov(): + """Test for Out Of Vocabulary (OOV) words""" + vs = Doc2VecVectorStore() + documents = [ + Document(content="test"), + Document(content="test1"), + Document(content="test2"), + Document(content="test3"), + ] + vs.add_documents(documents) + assert len(vs.retrieve(query="what is test 4", top_k=2)) == 2 diff --git a/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py b/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py index 45b1c7640..a150d0de5 100644 --- a/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py @@ -24,7 +24,7 @@ def _lazy_import(module_name, module_description=None): "PythonParser", "RegExParser", "TextBlobNounParser", - "TextBlobSentenceParser", + # "TextBlobSentenceParser", "URLExtractorParser", "XMLParser", ] diff --git a/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py b/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py index 3dfade766..dedd82244 100644 --- a/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py +++ b/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py @@ -19,6 +19,9 @@ def call(self, *args, **kwargs): @abstractmethod def __call__(self, *args, **kwargs): raise NotImplementedError("Subclasses must implement the __call__ method.") + + def extract_signature_details(self): + pass # #def __getstate__(self): diff --git a/pkgs/swarmauri/swarmauri/tools/concrete/SMOGIndexTool.py b/pkgs/swarmauri/swarmauri/tools/concrete/SMOGIndexTool.py deleted file mode 100644 index 23ce384df..000000000 --- a/pkgs/swarmauri/swarmauri/tools/concrete/SMOGIndexTool.py +++ /dev/null @@ -1,113 +0,0 @@ -from swarmauri_core.typing import SubclassUnion -from typing import List, Literal, Dict -from pydantic import Field -from swarmauri.tools.base.ToolBase import ToolBase -from swarmauri.tools.concrete.Parameter import Parameter -import re -import math -import nltk -from nltk.tokenize import sent_tokenize - -# Download required NLTK data once during module load -nltk.download("punkt", quiet=True) - - -class SMOGIndexTool(ToolBase): - version: str = "0.1.dev2" - parameters: List[Parameter] = Field( - default_factory=lambda: [ - Parameter( - name="text", - type="string", - description="The text to analyze for SMOG Index", - required=True, - ) - ] - ) - name: str = "SMOGIndexTool" - description: str = "Calculates the SMOG Index for the provided text." - type: Literal["SMOGIndexTool"] = "SMOGIndexTool" - - def __call__(self, text: str) -> Dict[str, float]: - """ - Calculates the SMOG Index for the provided text. - - Parameters: - text (str): The text to analyze. - - Returns: - float: The calculated SMOG Index. - """ - return {"smog_index": self.calculate_smog_index(text)} - - def calculate_smog_index(self, text: str) -> float: - """ - Calculate the SMOG Index for a given text. - - Parameters: - text (str): The text to analyze. - - Returns: - float: The calculated SMOG Index. - """ - sentences = self.count_sentences(text) - polysyllables = self.count_polysyllables(text) - - if sentences == 0: - return 0.0 # Avoid division by zero - - smog_index = 1.0430 * math.sqrt(polysyllables * (30 / sentences)) + 3.1291 - return round(smog_index, 1) - - def count_sentences(self, text: str) -> int: - """ - Count the number of sentences in the text. 
- - Parameters: - text (str): The text to analyze. - - Returns: - int: The number of sentences in the text. - """ - sentences = sent_tokenize(text) - return len(sentences) - - def count_polysyllables(self, text: str) -> int: - """ - Count the number of polysyllabic words (words with three or more syllables) in the text. - - Parameters: - text (str): The text to analyze. - - Returns: - int: The number of polysyllabic words in the text. - """ - words = re.findall(r"\w+", text) - return len([word for word in words if self.count_syllables(word) >= 3]) - - def count_syllables(self, word: str) -> int: - """ - Count the number of syllables in a given word. - - Parameters: - word (str): The word to analyze. - - Returns: - int: The number of syllables in the word. - """ - word = word.lower() - vowels = "aeiouy" - count = 0 - if word and word[0] in vowels: - count += 1 - for index in range(1, len(word)): - if word[index] in vowels and word[index - 1] not in vowels: - count += 1 - if word.endswith("e") and not word.endswith("le"): - count -= 1 - if count == 0: - count = 1 - return count - - -SubclassUnion.update(baseclass=ToolBase, type_name="SMOGIndexTool", obj=SMOGIndexTool) diff --git a/pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py b/pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py new file mode 100644 index 000000000..412d10b90 --- /dev/null +++ b/pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py @@ -0,0 +1,85 @@ +from typing import ( + List, + Any, + Union, + Optional, + Callable, + get_type_hints, + get_args, + get_origin, +) +import inspect +from pydantic import BaseModel +from swarmauri.tools.concrete.Parameter import Parameter + + +class MethodSignatureExtractor(BaseModel): + parameters: List[Parameter] = [] + method: Callable + _type_mapping: dict = { + int: "integer", + float: "number", + str: "string", + bool: "boolean", + list: "array", + dict: "object", + Any: "any", + } + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.parameters = self.extract_signature_details() + + def _python_type_to_json_schema_type(self, py_type): + if get_origin(py_type) is not None: + origin = get_origin(py_type) + args = get_args(py_type) + + if origin is list: + items_type = self._python_type_to_json_schema_type(args[0]) + return {"type": "array", "items": items_type} + elif origin is dict: + return {"type": "object"} + elif origin in (Union, Optional): + if len(args) == 2 and type(None) in args: + non_none_type = args[0] if args[1] is type(None) else args[1] + return self._python_type_to_json_schema_type(non_none_type) + return { + "oneOf": [ + self._python_type_to_json_schema_type(arg) for arg in args + ] + } + return {"type": self._type_mapping.get(origin, "string")} + else: + return {"type": self._type_mapping.get(py_type, "string")} + + def extract_signature_details(self): + sig = inspect.signature(self.method) + type_hints = get_type_hints(self.method) + parameters = sig.parameters + details_list = [] + + for param_name, param in parameters.items(): + if param_name == "self": + continue + + param_type = type_hints.get(param_name, Any) + param_default = ( + param.default if param.default is not inspect.Parameter.empty else None + ) + required = param.default is inspect.Parameter.empty + enum = None + param_type_json_schema = self._python_type_to_json_schema_type(param_type) + + description = f"Parameter {param_name} of type {param_type_json_schema}" + + detail = Parameter( + name=param_name, + type=param_type_json_schema["type"], + 
description=description, + required=required, + enum=enum, + ) + details_list.append(detail) + + return details_list diff --git a/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py b/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py index 08a36e26c..7aee51192 100644 --- a/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py @@ -12,7 +12,7 @@ def _lazy_import(module_name, module_description=None): # List of vector store names (file names without the ".py" extension) vector_store_files = [ - "Doc2VecVectorStore", + # "Doc2VecVectorStore", "MlmVectorStore", "SqliteVectorStore", "TfidfVectorStore", From 22cfec302f4e7441c0e87ba5bb319b3b7eef8f9e Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Wed, 20 Nov 2024 13:32:13 +0100 Subject: [PATCH 07/42] comm - dependencies fix --- .../embeddings/concrete/MlmEmbedding.py | 0 .../parsers/concrete/BERTEmbeddingParser.py | 0 .../vector_stores}/MlmVectorStore.py | 2 +- .../unit/embeddings/MlmEmbedding_unit_test.py | 2 +- .../vector_stores/MlmVectorStore_unit_test.py | 2 +- pkgs/swarmauri/pyproject.toml | 20 +++--- .../swarmauri/embeddings/concrete/__init__.py | 8 +-- .../swarmauri/parsers/concrete/__init__.py | 4 +- .../vector_stores/concrete/__init__.py | 2 +- .../embeddings/Doc2VecEmbedding_unit_test.py | 22 ------ .../Doc2VecVectorStore_unit_test.py | 72 ------------------- 11 files changed, 20 insertions(+), 114 deletions(-) rename pkgs/{swarmauri/swarmauri => community/swarmauri_community}/embeddings/concrete/MlmEmbedding.py (100%) rename pkgs/{swarmauri/swarmauri => community/swarmauri_community}/parsers/concrete/BERTEmbeddingParser.py (100%) rename pkgs/{swarmauri/swarmauri/vector_stores/concrete => community/swarmauri_community/vector_stores}/MlmVectorStore.py (96%) rename pkgs/{swarmauri => community}/tests/unit/embeddings/MlmEmbedding_unit_test.py (85%) rename pkgs/{swarmauri => community}/tests/unit/vector_stores/MlmVectorStore_unit_test.py (89%) delete mode 100644 pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py delete mode 100644 pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py diff --git a/pkgs/swarmauri/swarmauri/embeddings/concrete/MlmEmbedding.py b/pkgs/community/swarmauri_community/embeddings/concrete/MlmEmbedding.py similarity index 100% rename from pkgs/swarmauri/swarmauri/embeddings/concrete/MlmEmbedding.py rename to pkgs/community/swarmauri_community/embeddings/concrete/MlmEmbedding.py diff --git a/pkgs/swarmauri/swarmauri/parsers/concrete/BERTEmbeddingParser.py b/pkgs/community/swarmauri_community/parsers/concrete/BERTEmbeddingParser.py similarity index 100% rename from pkgs/swarmauri/swarmauri/parsers/concrete/BERTEmbeddingParser.py rename to pkgs/community/swarmauri_community/parsers/concrete/BERTEmbeddingParser.py diff --git a/pkgs/swarmauri/swarmauri/vector_stores/concrete/MlmVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py similarity index 96% rename from pkgs/swarmauri/swarmauri/vector_stores/concrete/MlmVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py index ea5902602..e3e19abf9 100644 --- a/pkgs/swarmauri/swarmauri/vector_stores/concrete/MlmVectorStore.py +++ b/pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py @@ -1,6 +1,6 @@ from typing import List, Union, Literal from swarmauri.documents.concrete.Document import Document -from 
swarmauri.embeddings.concrete.MlmEmbedding import MlmEmbedding +from pkgs.community.swarmauri_community.embeddings.concrete.MlmEmbedding import MlmEmbedding from swarmauri.distances.concrete.CosineDistance import CosineDistance from swarmauri.vector_stores.base.VectorStoreBase import VectorStoreBase diff --git a/pkgs/swarmauri/tests/unit/embeddings/MlmEmbedding_unit_test.py b/pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py similarity index 85% rename from pkgs/swarmauri/tests/unit/embeddings/MlmEmbedding_unit_test.py rename to pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py index 6962bb802..399318ebd 100644 --- a/pkgs/swarmauri/tests/unit/embeddings/MlmEmbedding_unit_test.py +++ b/pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py @@ -1,5 +1,5 @@ import pytest -from swarmauri.embeddings.concrete.MlmEmbedding import MlmEmbedding +from pkgs.community.swarmauri_community.embeddings.concrete.MlmEmbedding import MlmEmbedding @pytest.mark.unit def test_ubc_resource(): diff --git a/pkgs/swarmauri/tests/unit/vector_stores/MlmVectorStore_unit_test.py b/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py similarity index 89% rename from pkgs/swarmauri/tests/unit/vector_stores/MlmVectorStore_unit_test.py rename to pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py index 06b0fa263..27e300fe7 100644 --- a/pkgs/swarmauri/tests/unit/vector_stores/MlmVectorStore_unit_test.py +++ b/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py @@ -1,6 +1,6 @@ import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri.vector_stores.concrete.MlmVectorStore import MlmVectorStore +from pkgs.community.swarmauri_community.vector_stores.MlmVectorStore import MlmVectorStore @pytest.mark.unit diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index fb20c59d9..04eb35465 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -42,10 +42,10 @@ beautifulsoup4 = { version = "04.12.3", optional = true } scipy = { version = ">=1.7.0,<1.14.0", optional = true } #scikit-learn = { version = "^1.4.2", optional = true } #spacy = { version = ">=3.0.0,<=3.8.2", optional = true } -transformers = { version = "^4.45.0", optional = true } -torch = { version = "^2.5.0", optional = true } -keras = { version = ">=3.2.0", optional = true } -tf-keras = { version = ">=2.16.0", optional = true } +#transformers = { version = "^4.45.0", optional = true } +#torch = { version = "^2.5.0", optional = true } +#keras = { version = ">=3.2.0", optional = true } +#tf-keras = { version = ">=2.16.0", optional = true } matplotlib = { version = ">=3.9.2", optional = true } [tool.poetry.extras] @@ -56,9 +56,9 @@ nlp = ["nltk", "textblob", "yake"] nlp_tools = ["beautifulsoup4"] #ml_toolkits = ["gensim", "scipy", "scikit-learn"] #spacy = ["spacy"] -transformers = ["transformers"] -torch = ["torch"] -tensorflow = ["keras", "tf-keras"] +#transformers = ["transformers"] +#torch = ["torch"] +#tensorflow = ["keras", "tf-keras"] visualization = ["matplotlib"] # Full option to install all extras @@ -70,9 +70,9 @@ full = [ "beautifulsoup4", #"gensim", "scipy", "scikit-learn", #"spacy", - "transformers", - "torch", - "keras", "tf-keras", + #"transformers", + #"torch", + #"keras", "tf-keras", "matplotlib" ] diff --git a/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py b/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py index a1f0f231c..a4fd73974 100644 --- 
a/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py @@ -11,20 +11,20 @@ def _lazy_import(module_name, module_description=None): return None # Lazy loading of embeddings with descriptive names -Doc2VecEmbedding = _lazy_import("swarmauri.embeddings.concrete.Doc2VecEmbedding", "Doc2VecEmbedding") +# Doc2VecEmbedding = _lazy_import("swarmauri.embeddings.concrete.Doc2VecEmbedding", "Doc2VecEmbedding") GeminiEmbedding = _lazy_import("swarmauri.embeddings.concrete.GeminiEmbedding", "GeminiEmbedding") MistralEmbedding = _lazy_import("swarmauri.embeddings.concrete.MistralEmbedding", "MistralEmbedding") -MlmEmbedding = _lazy_import("swarmauri.embeddings.concrete.MlmEmbedding", "MlmEmbedding") +# MlmEmbedding = _lazy_import("swarmauri.embeddings.concrete.MlmEmbedding", "MlmEmbedding") NmfEmbedding = _lazy_import("swarmauri.embeddings.concrete.NmfEmbedding", "NmfEmbedding") OpenAIEmbedding = _lazy_import("swarmauri.embeddings.concrete.OpenAIEmbedding", "OpenAIEmbedding") TfidfEmbedding = _lazy_import("swarmauri.embeddings.concrete.TfidfEmbedding", "TfidfEmbedding") # Adding lazy-loaded modules to __all__ __all__ = [ - "Doc2VecEmbedding", + # "Doc2VecEmbedding", "GeminiEmbedding", "MistralEmbedding", - "MlmEmbedding", + # "MlmEmbedding", "NmfEmbedding", "OpenAIEmbedding", "TfidfEmbedding", diff --git a/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py b/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py index a150d0de5..a42b1b55f 100644 --- a/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py @@ -13,7 +13,7 @@ def _lazy_import(module_name, module_description=None): # List of parser names (file names without the ".py" extension) parser_files = [ "BeautifulSoupElementParser", - "BERTEmbeddingParser", + # "BERTEmbeddingParser", "CSVParser", "EntityRecognitionParser", "HTMLTagStripParser", @@ -23,7 +23,7 @@ def _lazy_import(module_name, module_description=None): "PhoneNumberExtractorParser", "PythonParser", "RegExParser", - "TextBlobNounParser", + # "TextBlobNounParser", # "TextBlobSentenceParser", "URLExtractorParser", "XMLParser", diff --git a/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py b/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py index 7aee51192..2f946b377 100644 --- a/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py @@ -13,7 +13,7 @@ def _lazy_import(module_name, module_description=None): # List of vector store names (file names without the ".py" extension) vector_store_files = [ # "Doc2VecVectorStore", - "MlmVectorStore", + # "MlmVectorStore", "SqliteVectorStore", "TfidfVectorStore", ] diff --git a/pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py b/pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py deleted file mode 100644 index 7f3afc447..000000000 --- a/pkgs/swarmauri/tests/unit/embeddings/Doc2VecEmbedding_unit_test.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest -from swarmauri_community.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding - -@pytest.mark.unit -def test_ubc_resource(): - assert Doc2VecEmbedding().resource == 'Embedding' - -@pytest.mark.unit -def test_ubc_type(): - assert Doc2VecEmbedding().type == 'Doc2VecEmbedding' - -@pytest.mark.unit -def test_serialization(): - embedder = Doc2VecEmbedding() - assert embedder.id == Doc2VecEmbedding.model_validate_json(embedder.model_dump_json()).id - 
-@pytest.mark.unit -def test_fit_transform(): - embedder = Doc2VecEmbedding() - documents = ['test', 'cat', 'banana'] - embedder.fit_transform(documents) - assert ['banana', 'cat', 'test'] == embedder.extract_features() \ No newline at end of file diff --git a/pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py b/pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py deleted file mode 100644 index 01c8eb634..000000000 --- a/pkgs/swarmauri/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py +++ /dev/null @@ -1,72 +0,0 @@ -import pytest -from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.Doc2VecVectorStore import Doc2VecVectorStore - - -@pytest.mark.unit -def test_ubc_resource(): - vs = Doc2VecVectorStore() - assert vs.resource == "VectorStore" - assert vs.embedder.resource == "Embedding" - - -@pytest.mark.unit -def test_ubc_type(): - vs = Doc2VecVectorStore() - assert vs.type == "Doc2VecVectorStore" - - -@pytest.mark.unit -def test_serialization(): - vs = Doc2VecVectorStore() - assert vs.id == Doc2VecVectorStore.model_validate_json(vs.model_dump_json()).id - - -@pytest.mark.unit -def test_top_k(): - vs = Doc2VecVectorStore() - documents = [ - Document(content="test"), - Document(content="test1"), - Document(content="test2"), - Document(content="test3"), - ] - - vs.add_documents(documents) - assert len(vs.retrieve(query="test", top_k=2)) == 2 - - -@pytest.mark.unit -def test_adding_more_doc(): - vs = Doc2VecVectorStore() - documents_batch_1 = [ - Document(content="test"), - Document(content="test1"), - Document(content="test2"), - Document(content="test3"), - ] - documents_batch_2 = [ - Document(content="This is a test. Test number 4"), - Document(content="This is a test. Test number 5"), - Document(content="This is a test. Test number 6"), - Document(content="This is a test. 
Test number 7"), - ] - doc_count = len(documents_batch_1) + len(documents_batch_2) - - vs.add_documents(documents_batch_1) - vs.add_documents(documents_batch_2) - assert len(vs.retrieve(query="test", top_k=doc_count)) == doc_count - - -@pytest.mark.unit -def test_oov(): - """Test for Out Of Vocabulary (OOV) words""" - vs = Doc2VecVectorStore() - documents = [ - Document(content="test"), - Document(content="test1"), - Document(content="test2"), - Document(content="test3"), - ] - vs.add_documents(documents) - assert len(vs.retrieve(query="what is test 4", top_k=2)) == 2 From f1cd5e210b1e653947fb366f49881bdb54003a8e Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Wed, 20 Nov 2024 13:39:29 +0100 Subject: [PATCH 08/42] comm - minor fix --- .../swarmauri_community/vector_stores/MlmVectorStore.py | 2 +- pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py | 2 +- .../tests/unit/vector_stores/MlmVectorStore_unit_test.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py index e3e19abf9..21f45ec35 100644 --- a/pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py +++ b/pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py @@ -1,6 +1,6 @@ from typing import List, Union, Literal from swarmauri.documents.concrete.Document import Document -from pkgs.community.swarmauri_community.embeddings.concrete.MlmEmbedding import MlmEmbedding +from swarmauri_community.embeddings.concrete.MlmEmbedding import MlmEmbedding from swarmauri.distances.concrete.CosineDistance import CosineDistance from swarmauri.vector_stores.base.VectorStoreBase import VectorStoreBase diff --git a/pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py b/pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py index 399318ebd..c015aeac3 100644 --- a/pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py +++ b/pkgs/community/tests/unit/embeddings/MlmEmbedding_unit_test.py @@ -1,5 +1,5 @@ import pytest -from pkgs.community.swarmauri_community.embeddings.concrete.MlmEmbedding import MlmEmbedding +from swarmauri_community.embeddings.concrete.MlmEmbedding import MlmEmbedding @pytest.mark.unit def test_ubc_resource(): diff --git a/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py b/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py index 27e300fe7..5f486aebb 100644 --- a/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py +++ b/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py @@ -1,6 +1,6 @@ import pytest from swarmauri.documents.concrete.Document import Document -from pkgs.community.swarmauri_community.vector_stores.MlmVectorStore import MlmVectorStore +from swarmauri_community.vector_stores.MlmVectorStore import MlmVectorStore @pytest.mark.unit From 719710a6224ebb0d9a55e28ca8b253642a6b0b80 Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Wed, 20 Nov 2024 13:50:18 +0100 Subject: [PATCH 09/42] swarm - reverted unfinished work --- .../swarmauri/tools/base/ToolBase.py | 3 - .../utils/extract_signature_decorator.py | 85 ------------------- 2 files changed, 88 deletions(-) delete mode 100644 pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py diff --git a/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py b/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py 
index dedd82244..3dfade766 100644 --- a/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py +++ b/pkgs/swarmauri/swarmauri/tools/base/ToolBase.py @@ -19,9 +19,6 @@ def call(self, *args, **kwargs): @abstractmethod def __call__(self, *args, **kwargs): raise NotImplementedError("Subclasses must implement the __call__ method.") - - def extract_signature_details(self): - pass # #def __getstate__(self): diff --git a/pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py b/pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py deleted file mode 100644 index 412d10b90..000000000 --- a/pkgs/swarmauri/swarmauri/utils/extract_signature_decorator.py +++ /dev/null @@ -1,85 +0,0 @@ -from typing import ( - List, - Any, - Union, - Optional, - Callable, - get_type_hints, - get_args, - get_origin, -) -import inspect -from pydantic import BaseModel -from swarmauri.tools.concrete.Parameter import Parameter - - -class MethodSignatureExtractor(BaseModel): - parameters: List[Parameter] = [] - method: Callable - _type_mapping: dict = { - int: "integer", - float: "number", - str: "string", - bool: "boolean", - list: "array", - dict: "object", - Any: "any", - } - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.parameters = self.extract_signature_details() - - def _python_type_to_json_schema_type(self, py_type): - if get_origin(py_type) is not None: - origin = get_origin(py_type) - args = get_args(py_type) - - if origin is list: - items_type = self._python_type_to_json_schema_type(args[0]) - return {"type": "array", "items": items_type} - elif origin is dict: - return {"type": "object"} - elif origin in (Union, Optional): - if len(args) == 2 and type(None) in args: - non_none_type = args[0] if args[1] is type(None) else args[1] - return self._python_type_to_json_schema_type(non_none_type) - return { - "oneOf": [ - self._python_type_to_json_schema_type(arg) for arg in args - ] - } - return {"type": self._type_mapping.get(origin, "string")} - else: - return {"type": self._type_mapping.get(py_type, "string")} - - def extract_signature_details(self): - sig = inspect.signature(self.method) - type_hints = get_type_hints(self.method) - parameters = sig.parameters - details_list = [] - - for param_name, param in parameters.items(): - if param_name == "self": - continue - - param_type = type_hints.get(param_name, Any) - param_default = ( - param.default if param.default is not inspect.Parameter.empty else None - ) - required = param.default is inspect.Parameter.empty - enum = None - param_type_json_schema = self._python_type_to_json_schema_type(param_type) - - description = f"Parameter {param_name} of type {param_type_json_schema}" - - detail = Parameter( - name=param_name, - type=param_type_json_schema["type"], - description=description, - required=required, - enum=enum, - ) - details_list.append(detail) - - return details_list From ab4aca2c475543e96a1aafe589ee06e3c72605c7 Mon Sep 17 00:00:00 2001 From: 3rdSon Date: Wed, 20 Nov 2024 15:08:23 +0100 Subject: [PATCH 10/42] swarm - implemented hyperbolicvision --- .../llms/concrete/HyperbolicVisionModel.py | 378 ++++++++++++++++++ .../llms/HyperbolicVisionModel_unit_test.py | 158 ++++++++ 2 files changed, 536 insertions(+) create mode 100644 pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py create mode 100644 pkgs/swarmauri/tests/unit/llms/HyperbolicVisionModel_unit_test.py diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py new file mode 100644 
index 000000000..0dab82ce3 --- /dev/null +++ b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py @@ -0,0 +1,378 @@ +import json +from pydantic import PrivateAttr +import httpx +from typing import List, Optional, Dict, Literal, Any, AsyncGenerator, Generator +import asyncio + +from swarmauri_core.typing import SubclassUnion +from swarmauri.conversations.concrete.Conversation import Conversation +from swarmauri.messages.base.MessageBase import MessageBase +from swarmauri.messages.concrete.AgentMessage import AgentMessage +from swarmauri.llms.base.LLMBase import LLMBase +from swarmauri.messages.concrete.AgentMessage import UsageData +from swarmauri.utils.retry_decorator import retry_on_status_codes +from swarmauri.utils.file_path_to_base64 import file_path_to_base64 + + +class HyperbolicVisionModel(LLMBase): + """ + HyperbolicVisionModel class for interacting with the Hyperbolic vision language models API. This class + provides synchronous and asynchronous methods to send conversation data to the + model, receive predictions, and stream responses. + + Attributes: + api_key (str): API key for authenticating requests to the Hyperbolic API. + allowed_models (List[str]): List of allowed model names that can be used. + name (str): The default model name to use for predictions. + type (Literal["HyperbolicVisionModel"]): The type identifier for this class. + """ + + api_key: str + allowed_models: List[str] = [ + "Qwen/Qwen2-VL-72B-Instruct", + "mistralai/Pixtral-12B-2409", + "Qwen/Qwen2-VL-7B-Instruct", + ] + name: str = "Qwen/Qwen2-VL-72B-Instruct" + type: Literal["HyperbolicVisionModel"] = "HyperbolicVisionModel" + _headers: Dict[str, str] = PrivateAttr(default=None) + _client: httpx.Client = PrivateAttr(default=None) + _BASE_URL: str = PrivateAttr( + default="https://api.hyperbolic.xyz/v1/chat/completions" + ) + + def __init__(self, **data): + """ + Initialize the HyperbolicVisionModel class with the provided data. + + Args: + **data: Arbitrary keyword arguments containing initialization data. + """ + super().__init__(**data) + self._headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.api_key}", + } + self._client = httpx.Client( + headers=self._headers, + base_url=self._BASE_URL, + ) + + def _format_messages( + self, + messages: List[SubclassUnion[MessageBase]], + ) -> List[Dict[str, Any]]: + """ + Formats conversation messages into the structure expected by the API. + + Args: + messages (List[MessageBase]): List of message objects from the conversation history. + + Returns: + List[Dict[str, Any]]: List of formatted message dictionaries. + """ + formatted_messages = [] + for message in messages: + formatted_message = message.model_dump( + include=["content", "role", "name"], exclude_none=True + ) + + if isinstance(formatted_message["content"], list): + formatted_content = [] + for item in formatted_message["content"]: + if item["type"] == "image_url" and "file_path" in item: + # Convert file path to base64 + base64_img = file_path_to_base64(item["file_path"]) + formatted_content.append( + { + "type": "image_url", + "image_url": { + "url": f"data:image/jpeg;base64,{base64_img}" + }, + } + ) + else: + formatted_content.append(item) + formatted_message["content"] = formatted_content + + formatted_messages.append(formatted_message) + return formatted_messages + + def _prepare_usage_data(self, usage_data) -> UsageData: + """ + Prepares and validates usage data received from the API response. 
+ + Args: + usage_data (dict): Raw usage data from the API response. + + Returns: + UsageData: Validated usage data instance. + """ + return UsageData.model_validate(usage_data) + + @retry_on_status_codes((429, 529), max_retries=1) + def predict( + self, + conversation: Conversation, + temperature: float = 0.7, + max_tokens: int = 2048, + top_p: float = 0.9, + stop: Optional[List[str]] = None, + ) -> Conversation: + """ + Generates a response from the model based on the given conversation. + + Args: + conversation (Conversation): Conversation object with message history. + temperature (float): Sampling temperature for response diversity. + max_tokens (int): Maximum tokens for the model's response. + top_p (float): Cumulative probability for nucleus sampling. + stop (Optional[List[str]]): List of stop sequences for response termination. + + Returns: + Conversation: Updated conversation with the model's response. + """ + formatted_messages = self._format_messages(conversation.history) + payload = { + "model": self.name, + "messages": formatted_messages, + "temperature": temperature, + "max_tokens": max_tokens, + "top_p": top_p, + "stop": stop or [], + } + + response = self._client.post(self._BASE_URL, json=payload) + response.raise_for_status() + + response_data = response.json() + + message_content = response_data["choices"][0]["message"]["content"] + usage_data = response_data.get("usage", {}) + + usage = self._prepare_usage_data(usage_data) + conversation.add_message(AgentMessage(content=message_content, usage=usage)) + return conversation + + @retry_on_status_codes((429, 529), max_retries=1) + async def apredict( + self, + conversation: Conversation, + temperature: float = 0.7, + max_tokens: int = 2048, + top_p: float = 0.9, + stop: Optional[List[str]] = None, + ) -> Conversation: + """ + Async method to generate a response from the model based on the given conversation. + + Args: + conversation (Conversation): Conversation object with message history. + temperature (float): Sampling temperature for response diversity. + max_tokens (int): Maximum tokens for the model's response. + top_p (float): Cumulative probability for nucleus sampling. + stop (Optional[List[str]]): List of stop sequences for response termination. + + Returns: + Conversation: Updated conversation with the model's response. + """ + formatted_messages = self._format_messages(conversation.history) + payload = { + "model": self.name, + "messages": formatted_messages, + "temperature": temperature, + "max_tokens": max_tokens, + "top_p": top_p, + "stop": stop or [], + } + + async with httpx.AsyncClient() as async_client: + response = await async_client.post( + self._BASE_URL, json=payload, headers=self._headers + ) + response.raise_for_status() + + response_data = response.json() + + message_content = response_data["choices"][0]["message"]["content"] + usage_data = response_data.get("usage", {}) + + usage = self._prepare_usage_data(usage_data) + conversation.add_message(AgentMessage(content=message_content, usage=usage)) + return conversation + + @retry_on_status_codes((429, 529), max_retries=1) + def stream( + self, + conversation: Conversation, + temperature: float = 0.7, + max_tokens: int = 2048, + top_p: float = 0.9, + stop: Optional[List[str]] = None, + ) -> Generator[str, None, None]: + """ + Streams response text from the model in real-time. + + Args: + conversation (Conversation): Conversation object with message history. + temperature (float): Sampling temperature for response diversity. 
+            max_tokens (int): Maximum tokens for the model's response.
+            top_p (float): Cumulative probability for nucleus sampling.
+            stop (Optional[List[str]]): List of stop sequences for response termination.
+
+        Yields:
+            str: Partial response content from the model.
+        """
+        formatted_messages = self._format_messages(conversation.history)
+        payload = {
+            "model": self.name,
+            "messages": formatted_messages,
+            "temperature": temperature,
+            "max_tokens": max_tokens,
+            "top_p": top_p,
+            "stream": True,
+            "stop": stop or [],
+        }
+
+        response = self._client.post(self._BASE_URL, json=payload)
+        response.raise_for_status()
+
+        message_content = ""
+        for line in response.iter_lines():
+            json_str = line.replace("data: ", "")
+            try:
+                if json_str:
+                    chunk = json.loads(json_str)
+                    if chunk["choices"][0]["delta"]:
+                        delta = chunk["choices"][0]["delta"]["content"]
+                        message_content += delta
+                        yield delta
+            except json.JSONDecodeError:
+                pass
+
+        conversation.add_message(AgentMessage(content=message_content))
+
+    @retry_on_status_codes((429, 529), max_retries=1)
+    async def astream(
+        self,
+        conversation: Conversation,
+        temperature: float = 0.7,
+        max_tokens: int = 2048,
+        top_p: float = 0.9,
+        stop: Optional[List[str]] = None,
+    ) -> AsyncGenerator[str, None]:
+        """
+        Async generator that streams response text from the model in real-time.
+
+        Args:
+            conversation (Conversation): Conversation object with message history.
+            temperature (float): Sampling temperature for response diversity.
+            max_tokens (int): Maximum tokens for the model's response.
+            top_p (float): Cumulative probability for nucleus sampling.
+            stop (Optional[List[str]]): List of stop sequences for response termination.
+
+        Yields:
+            str: Partial response content from the model.
+        """
+        formatted_messages = self._format_messages(conversation.history)
+        payload = {
+            "model": self.name,
+            "messages": formatted_messages,
+            "temperature": temperature,
+            "max_tokens": max_tokens,
+            "top_p": top_p,
+            "stream": True,
+            "stop": stop or [],
+        }
+
+        # The client must stay open while the response lines are consumed.
+        async with httpx.AsyncClient() as async_client:
+            response = await async_client.post(
+                self._BASE_URL, json=payload, headers=self._headers
+            )
+            response.raise_for_status()
+
+            message_content = ""
+            async for line in response.aiter_lines():
+                json_str = line.replace("data: ", "")
+                try:
+                    if json_str:
+                        chunk = json.loads(json_str)
+                        if chunk["choices"][0]["delta"]:
+                            delta = chunk["choices"][0]["delta"]["content"]
+                            message_content += delta
+                            yield delta
+                except json.JSONDecodeError:
+                    pass
+
+        conversation.add_message(AgentMessage(content=message_content))
+
+    def batch(
+        self,
+        conversations: List[Conversation],
+        temperature: float = 0.7,
+        max_tokens: int = 2048,
+        top_p: float = 0.9,
+        stop: Optional[List[str]] = None,
+    ) -> List[Conversation]:
+        """
+        Processes a batch of conversations and generates responses for each sequentially.
+
+        Args:
+            conversations (List[Conversation]): List of conversations to process.
+            temperature (float): Sampling temperature for response diversity.
+            max_tokens (int): Maximum tokens for each response.
+            top_p (float): Cumulative probability for nucleus sampling.
+            stop (Optional[List[str]]): List of stop sequences for response termination.
+
+        Returns:
+            List[Conversation]: List of updated conversations with model responses.
+ """ + results = [] + for conversation in conversations: + result_conversation = self.predict( + conversation, + temperature=temperature, + max_tokens=max_tokens, + top_p=top_p, + stop=stop, + ) + results.append(result_conversation) + return results + + async def abatch( + self, + conversations: List[Conversation], + temperature: float = 0.7, + max_tokens: int = 2048, + top_p: float = 0.9, + stop: Optional[List[str]] = None, + max_concurrent=5, + ) -> List[Conversation]: + """ + Async method for processing a batch of conversations concurrently. + + Args: + conversations (List[Conversation]): List of conversations to process. + temperature (float): Sampling temperature for response diversity. + max_tokens (int): Maximum tokens for each response. + top_p (float): Cumulative probability for nucleus sampling. + stop (Optional[List[str]]): List of stop sequences for response termination. + max_concurrent (int): Maximum number of concurrent requests. + + Returns: + List[Conversation]: List of updated conversations with model responses. + """ + semaphore = asyncio.Semaphore(max_concurrent) + + async def process_conversation(conv: Conversation) -> Conversation: + async with semaphore: + return await self.apredict( + conv, + temperature=temperature, + max_tokens=max_tokens, + top_p=top_p, + stop=stop, + ) + + tasks = [process_conversation(conv) for conv in conversations] + return await asyncio.gather(*tasks) diff --git a/pkgs/swarmauri/tests/unit/llms/HyperbolicVisionModel_unit_test.py b/pkgs/swarmauri/tests/unit/llms/HyperbolicVisionModel_unit_test.py new file mode 100644 index 000000000..495341aae --- /dev/null +++ b/pkgs/swarmauri/tests/unit/llms/HyperbolicVisionModel_unit_test.py @@ -0,0 +1,158 @@ +import pytest +import os +from swarmauri.llms.concrete.HyperbolicVisionModel import HyperbolicVisionModel +from swarmauri.conversations.concrete.Conversation import Conversation +from swarmauri.messages.concrete.HumanMessage import HumanMessage +from dotenv import load_dotenv +from swarmauri.utils.timeout_wrapper import timeout + +load_dotenv() + +API_KEY = os.getenv("HYPERBOLIC_API_KEY") + + +@pytest.fixture(scope="module") +def hyperbolic_vision_model(): + if not API_KEY: + pytest.skip("Skipping due to environment variable not set") + model = HyperbolicVisionModel(api_key=API_KEY) + return model + + +def get_allowed_models(): + if not API_KEY: + return [] + model = HyperbolicVisionModel(api_key=API_KEY) + return model.allowed_models + + +@timeout(5) +@pytest.mark.unit +def test_ubc_resource(hyperbolic_vision_model): + assert hyperbolic_vision_model.resource == "LLM" + + +@timeout(5) +@pytest.mark.unit +def test_ubc_type(hyperbolic_vision_model): + assert hyperbolic_vision_model.type == "HyperbolicVisionModel" + + +@timeout(5) +@pytest.mark.unit +def test_serialization(hyperbolic_vision_model): + assert ( + hyperbolic_vision_model.id + == HyperbolicVisionModel.model_validate_json( + hyperbolic_vision_model.model_dump_json() + ).id + ) + + +@timeout(5) +@pytest.mark.unit +def test_default_model_name(hyperbolic_vision_model): + assert hyperbolic_vision_model.name == "Qwen/Qwen2-VL-72B-Instruct" + + +def create_test_conversation(image_url, prompt): + conversation = Conversation() + conversation.add_message( + HumanMessage( + content=[ + {"type": "text", "text": prompt}, + {"type": "image_url", "image_url": {"url": image_url}}, + ] + ) + ) + return conversation + + +@pytest.mark.parametrize("model_name", get_allowed_models()) +@timeout(5) +@pytest.mark.unit +def test_predict(hyperbolic_vision_model, 
model_name): + model = hyperbolic_vision_model + model.name = model_name + + image_url = "https://llava-vl.github.io/static/images/monalisa.jpg" + prompt = "Who painted this artwork?" + conversation = create_test_conversation(image_url, prompt) + + result = model.predict(conversation) + + assert result.history[-1].content is not None + assert isinstance(result.history[-1].content, str) + assert len(result.history[-1].content) > 0 + + +@pytest.mark.asyncio +@pytest.mark.parametrize("model_name", get_allowed_models()) +@timeout(5) +@pytest.mark.unit +async def test_apredict(hyperbolic_vision_model, model_name): + model = hyperbolic_vision_model + model.name = model_name + + image_url = "https://llava-vl.github.io/static/images/monalisa.jpg" + prompt = "Describe the woman in the painting." + conversation = create_test_conversation(image_url, prompt) + + result = await model.apredict(conversation) + + assert result.history[-1].content is not None + assert isinstance(result.history[-1].content, str) + assert len(result.history[-1].content) > 0 + + +@timeout(5) +@pytest.mark.unit +def test_batch(hyperbolic_vision_model): + image_urls = [ + "https://llava-vl.github.io/static/images/monalisa.jpg", + "https://llava-vl.github.io/static/images/monalisa.jpg", + ] + prompts = [ + "Who painted this artwork?", + "Describe the woman in the painting.", + ] + + conversations = [ + create_test_conversation(image_url, prompt) + for image_url, prompt in zip(image_urls, prompts) + ] + + results = hyperbolic_vision_model.batch(conversations) + + assert len(results) == len(image_urls) + for result in results: + assert result.history[-1].content is not None + assert isinstance(result.history[-1].content, str) + assert len(result.history[-1].content) > 0 + + +@pytest.mark.asyncio +@timeout(5) +@pytest.mark.unit +async def test_abatch(hyperbolic_vision_model): + image_urls = [ + "https://llava-vl.github.io/static/images/monalisa.jpg", + "https://llava-vl.github.io/static/images/monalisa.jpg", + ] + prompts = [ + "Who painted this artwork?", + "Describe the woman in the painting.", + ] + + conversations = [ + create_test_conversation(image_url, prompt) + for image_url, prompt in zip(image_urls, prompts) + ] + + results = await hyperbolic_vision_model.abatch(conversations) + + assert len(results) == len(image_urls) + for result in results: + assert result.history[-1].content is not None + assert isinstance(result.history[-1].content, str) + assert len(result.history[-1].content) > 0 From a17005800b6411640bba6f7bae1a3358c500b1ef Mon Sep 17 00:00:00 2001 From: 3rdSon Date: Wed, 20 Nov 2024 15:11:45 +0100 Subject: [PATCH 11/42] swarm - hyperbolic --- .../swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py index 0dab82ce3..14e2d196a 100644 --- a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py +++ b/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicVisionModel.py @@ -25,6 +25,9 @@ class HyperbolicVisionModel(LLMBase): allowed_models (List[str]): List of allowed model names that can be used. name (str): The default model name to use for predictions. type (Literal["HyperbolicVisionModel"]): The type identifier for this class. 
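+
+    Example (illustrative sketch only, mirroring the payload shape used in the
+    unit tests; assumes a valid Hyperbolic API key and a publicly reachable
+    image URL):
+
+        from swarmauri.llms.concrete.HyperbolicVisionModel import HyperbolicVisionModel
+        from swarmauri.conversations.concrete.Conversation import Conversation
+        from swarmauri.messages.concrete.HumanMessage import HumanMessage
+
+        model = HyperbolicVisionModel(api_key="<HYPERBOLIC_API_KEY>")
+        conversation = Conversation()
+        conversation.add_message(
+            HumanMessage(
+                content=[
+                    {"type": "text", "text": "Who painted this artwork?"},
+                    {
+                        "type": "image_url",
+                        "image_url": {
+                            "url": "https://llava-vl.github.io/static/images/monalisa.jpg"
+                        },
+                    },
+                ]
+            )
+        )
+        conversation = model.predict(conversation)
+        print(conversation.history[-1].content)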
+ + Link to Allowed Models: https://app.hyperbolic.xyz/models + Link to API KEYS: https://app.hyperbolic.xyz/settings """ api_key: str From 4421b1a293cf4a706e0020c9cfafbcac552287c8 Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Wed, 20 Nov 2024 15:55:39 +0100 Subject: [PATCH 12/42] comm - refactor: update dependencies and add optional extras in pyproject.toml --- pkgs/community/pyproject.toml | 87 +++++++++++++++++++---------------- 1 file changed, 48 insertions(+), 39 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index b9b8d8665..3d60c38ad 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -15,48 +15,59 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.10,<3.13" -captcha = "*" -chromadb = "*" -duckdb = "*" -folium = "*" -gensim = "*" -#google-generativeai = "*" -gradio = "*" -leptonai = "0.22.0" -neo4j = "*" -nltk = "*" -#openai = "^1.52.0" -pandas = "*" -psutil = "*" -pygithub = "*" -python-dotenv = "*" -qrcode = "*" -redis = "^4.0" -#scikit-learn="^1.4.2" swarmauri = "==0.5.2" -textstat = "*" -transformers = ">=4.45.0" typing_extensions = "*" -tiktoken = "*" -pymupdf = "*" -annoy = "*" -qdrant_client = "*" -weaviate = "*" -pinecone-client = { version = "*", extras = ["grpc"] } -PyPDF2 = "*" -pypdftk = "*" -weaviate-client = "*" -protobuf = "^3.20.0" -# Pacmap requires specific version of numba -#numba = ">=0.59.0" -#pacmap = "==0.7.3" +matplotlib = { version = ">=3.9.2", optional = true } +nltk = { version = "^3.9.1", optional = true } +gensim = { version = "==4.3.3", optional = true } +transformers = { version = "^4.45.0", optional = true } +spacy = { version = ">=3.0.0,<=3.8.2", optional = true } +textblob = { version = "^0.18.0", optional = true } +torch = { version = "^2.5.0", optional = true } +leptonai = { version = "==0.22.0", optional = true } +redis = { version = "^4.0", optional = true } +#protobuf = { version = "^3.20.0", optional = true } +#numba = { version = ">=0.59.0", optional = true } +#pacmap = { version = "==0.7.3", optional = true } +[tool.poetry.extras] +# Grouped optional dependencies +nlp = ["nltk", "textblob", "textstat", "gensim"] +ml_toolkits = ["transformers", "annoy"] +visualization = ["folium", "matplotlib"] +storage = ["redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client[grpc]"] +document_processing = ["PyPDF2", "pymupdf", "pypdftk"] +cloud_integration = ["psutil", "qrcode", "pygithub"] +spacy = ["spacy"] +transformers = ["transformers"] +torch = ["torch"] +gradio = ["gradio"] +model_clients = ["leptonai", "google-generativeai", "openai"] +tiktoken = ["tiktoken"] +#protobuf = ["protobuf"] +#pacmap = ["numba", "pacmap"] + +# Full installation +full = [ + "nltk", "gensim", "textstat", + "transformers", "annoy", + "folium", "matplotlib", + "redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client[grpc]", + "PyPDF2", "pymupdf", "pypdftk", + "psutil", "qrcode", "pygithub", + "scipy", "spacy", + "torch", + "gradio", + "leptonai", "google-generativeai", "openai" + #"pacmap", "numba" +] [tool.poetry.dev-dependencies] -flake8 = "^7.0" # Add flake8 as a development dependency -pytest = "^8.0" # Ensure pytest is also added if you run tests +flake8 = "^7.0" +pytest = "^8.0" pytest-asyncio = ">=0.24.0" pytest-xdist = "^3.6.1" +python-dotenv = "^1.0.0" [build-system] requires = ["poetry-core>=1.0.0"] @@ -70,12 +81,10 @@ markers = [ "unit: Unit tests", 
"integration: Integration tests", "acceptance: Acceptance tests", - "experimental: Experimental tests", + "experimental: Experimental tests" ] - log_cli = true log_cli_level = "INFO" log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" log_cli_date_format = "%Y-%m-%d %H:%M:%S" - -asyncio_default_fixture_loop_scope = "function" +asyncio_default_fixture_loop_scope = "function" \ No newline at end of file From 2b05d476ee825bd1b7f21112392a5dc64717434c Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Wed, 20 Nov 2024 16:21:14 +0100 Subject: [PATCH 13/42] comm - minor fix --- pkgs/community/pyproject.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index 3d60c38ad..332deb6bd 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -26,6 +26,7 @@ textblob = { version = "^0.18.0", optional = true } torch = { version = "^2.5.0", optional = true } leptonai = { version = "==0.22.0", optional = true } redis = { version = "^4.0", optional = true } +pinecone-client = { version = ">=2.0.0", optional = true, extras = ["grpc"] } #protobuf = { version = "^3.20.0", optional = true } #numba = { version = ">=0.59.0", optional = true } #pacmap = { version = "==0.7.3", optional = true } @@ -35,7 +36,7 @@ redis = { version = "^4.0", optional = true } nlp = ["nltk", "textblob", "textstat", "gensim"] ml_toolkits = ["transformers", "annoy"] visualization = ["folium", "matplotlib"] -storage = ["redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client[grpc]"] +storage = ["redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client"] document_processing = ["PyPDF2", "pymupdf", "pypdftk"] cloud_integration = ["psutil", "qrcode", "pygithub"] spacy = ["spacy"] @@ -52,7 +53,7 @@ full = [ "nltk", "gensim", "textstat", "transformers", "annoy", "folium", "matplotlib", - "redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client[grpc]", + "redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client", "PyPDF2", "pymupdf", "pypdftk", "psutil", "qrcode", "pygithub", "scipy", "spacy", From feecdc1dcc3c9c0aeec3204949e3b5d70cbb0fe1 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Wed, 20 Nov 2024 16:19:56 -0600 Subject: [PATCH 14/42] Update ImageGenBase.py --- pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py b/pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py index e9fbda41e..ab3dff1f2 100644 --- a/pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py +++ b/pkgs/swarmauri/swarmauri/image_gens/base/ImageGenBase.py @@ -9,7 +9,7 @@ class ImageGenBase(IGenImage, ComponentBase): allowed_models: List[str] = [] resource: Optional[str] = Field(default=ResourceTypes.IMAGE_GEN.value, frozen=True) model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True) - type: Literal["LLMBase"] = "LLMBase" + type: Literal["ImageGenBase"] = "ImageGenBase" @model_validator(mode="after") @classmethod From 2147e9265f51108e98078726f2184c8b4960fa7c Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:29:02 +0100 Subject: [PATCH 15/42] swarm - moved image gen models --- 
.../concrete/BlackForestImgGenModel.py | 7 +- .../concrete/DeepInfraImgGenModel.py | 8 +- .../concrete/FalAIImgGenModel.py | 8 +- .../concrete/HyperbolicImgGenModel.py | 12 +- .../concrete/OpenAIImgGenModel.py | 8 +- .../llms/concrete/BlackForestImgGenModel.py | 259 ------------------ .../DeepInfraImgGenModel_unit_test.py | 2 +- .../FalAIImgGenModel_unit_test.py | 2 +- .../HyperbolicImgGenModel_unit_test.py | 2 +- .../OpenAIImgGenModel_unit_tesst.py | 2 +- .../llms/BlackForestImgGenModel_unit_test.py | 120 -------- 11 files changed, 25 insertions(+), 405 deletions(-) rename pkgs/swarmauri/swarmauri/{llms => image_gens}/concrete/DeepInfraImgGenModel.py (97%) rename pkgs/swarmauri/swarmauri/{llms => image_gens}/concrete/FalAIImgGenModel.py (98%) rename pkgs/swarmauri/swarmauri/{llms => image_gens}/concrete/HyperbolicImgGenModel.py (96%) rename pkgs/swarmauri/swarmauri/{llms => image_gens}/concrete/OpenAIImgGenModel.py (97%) delete mode 100644 pkgs/swarmauri/swarmauri/llms/concrete/BlackForestImgGenModel.py rename pkgs/swarmauri/tests/unit/{llms => image_gens}/DeepInfraImgGenModel_unit_test.py (97%) rename pkgs/swarmauri/tests/unit/{llms => image_gens}/FalAIImgGenModel_unit_test.py (97%) rename pkgs/swarmauri/tests/unit/{llms => image_gens}/HyperbolicImgGenModel_unit_test.py (97%) rename pkgs/swarmauri/tests/unit/{llms => image_gens}/OpenAIImgGenModel_unit_tesst.py (97%) delete mode 100644 pkgs/swarmauri/tests/unit/llms/BlackForestImgGenModel_unit_test.py diff --git a/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py index c697764ff..202d5c631 100644 --- a/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py +++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py @@ -1,6 +1,6 @@ import httpx import time -from typing import List, Literal, Optional, Dict, ClassVar +from typing import List, Literal, Optional, Dict from pydantic import PrivateAttr from swarmauri.utils.retry_decorator import retry_on_status_codes from swarmauri.image_gens.base.ImageGenBase import ImageGenBase @@ -21,15 +21,14 @@ class BlackForestImgGenModel(ImageGenBase): api_key: str allowed_models: List[str] = ["flux-pro-1.1", "flux-pro", "flux-dev"] - asyncio: ClassVar = asyncio name: str = "flux-pro" # Default model type: Literal["BlackForestImgGenModel"] = "BlackForestImgGenModel" - def __init__(self, **data): + def __init__(self, **kwargs): """ Initializes the BlackForestImgGenModel instance with HTTP clients. 
""" - super().__init__(**data) + super().__init__(**kwargs) self._headers = { "Content-Type": "application/json", "X-Key": self.api_key, diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/DeepInfraImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/DeepInfraImgGenModel.py similarity index 97% rename from pkgs/swarmauri/swarmauri/llms/concrete/DeepInfraImgGenModel.py rename to pkgs/swarmauri/swarmauri/image_gens/concrete/DeepInfraImgGenModel.py index 56afc2105..5dcc35d98 100644 --- a/pkgs/swarmauri/swarmauri/llms/concrete/DeepInfraImgGenModel.py +++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/DeepInfraImgGenModel.py @@ -2,12 +2,12 @@ from typing import List, Literal from pydantic import PrivateAttr from swarmauri.utils.retry_decorator import retry_on_status_codes -from swarmauri.llms.base.LLMBase import LLMBase +from swarmauri.image_gens.base.ImageGenBase import ImageGenBase import asyncio import contextlib -class DeepInfraImgGenModel(LLMBase): +class DeepInfraImgGenModel(ImageGenBase): """ A model class for generating images from text prompts using DeepInfra's image generation API. @@ -37,7 +37,7 @@ class DeepInfraImgGenModel(LLMBase): name: str = "stabilityai/stable-diffusion-2-1" # Default model type: Literal["DeepInfraImgGenModel"] = "DeepInfraImgGenModel" - def __init__(self, **data): + def __init__(self, **kwargs): """ Initializes the DeepInfraImgGenModel instance. @@ -47,7 +47,7 @@ def __init__(self, **data): Args: **data: Keyword arguments for model initialization. """ - super().__init__(**data) + super().__init__(**kwargs) self._headers = { "Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}", diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/FalAIImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/FalAIImgGenModel.py similarity index 98% rename from pkgs/swarmauri/swarmauri/llms/concrete/FalAIImgGenModel.py rename to pkgs/swarmauri/swarmauri/image_gens/concrete/FalAIImgGenModel.py index 6943d1e59..b6eadb3b7 100644 --- a/pkgs/swarmauri/swarmauri/llms/concrete/FalAIImgGenModel.py +++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/FalAIImgGenModel.py @@ -3,11 +3,11 @@ from typing import List, Literal, Optional, Dict from pydantic import Field, PrivateAttr from swarmauri.utils.retry_decorator import retry_on_status_codes -from swarmauri.llms.base.LLMBase import LLMBase +from swarmauri.image_gens.base.ImageGenBase import ImageGenBase import time -class FalAIImgGenModel(LLMBase): +class FalAIImgGenModel(ImageGenBase): """ A model class for generating images from text using FluxPro's image generation model, provided by FalAI. This class uses a queue-based API to handle image generation requests. @@ -34,7 +34,7 @@ class FalAIImgGenModel(LLMBase): max_retries: int = Field(default=60) # Maximum number of status check retries retry_delay: float = Field(default=1.0) # Delay between status checks in seconds - def __init__(self, **data): + def __init__(self, **kwargs): """ Initializes the model with the specified API key and model name. @@ -44,7 +44,7 @@ def __init__(self, **data): Raises: ValueError: If an invalid model name is provided. 
""" - super().__init__(**data) + super().__init__(**kwargs) self._headers = { "Content-Type": "application/json", "Authorization": f"Key {self.api_key}", diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/HyperbolicImgGenModel.py similarity index 96% rename from pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicImgGenModel.py rename to pkgs/swarmauri/swarmauri/image_gens/concrete/HyperbolicImgGenModel.py index 72d6e8267..43f7dd60b 100644 --- a/pkgs/swarmauri/swarmauri/llms/concrete/HyperbolicImgGenModel.py +++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/HyperbolicImgGenModel.py @@ -1,13 +1,13 @@ import httpx -from typing import List, Literal, Optional +from typing import List, Literal from pydantic import PrivateAttr from swarmauri.utils.retry_decorator import retry_on_status_codes -from swarmauri.llms.base.LLMBase import LLMBase +from swarmauri.image_gens.base.ImageGenBase import ImageGenBase import asyncio import contextlib -class HyperbolicImgGenModel(LLMBase): +class HyperbolicImgGenModel(ImageGenBase): """ A model class for generating images from text prompts using Hyperbolic's image generation API. @@ -29,7 +29,7 @@ class HyperbolicImgGenModel(LLMBase): """ _BASE_URL: str = PrivateAttr("https://api.hyperbolic.xyz/v1/image/generation") - _client: httpx.Client = PrivateAttr() + _client: httpx.Client = PrivateAttr(default=None) _async_client: httpx.AsyncClient = PrivateAttr(default=None) api_key: str @@ -52,7 +52,7 @@ class HyperbolicImgGenModel(LLMBase): enable_refiner: bool = False backend: str = "auto" - def __init__(self, **data): + def __init__(self, **kwargs): """ Initializes the HyperbolicImgGenModel instance. @@ -62,7 +62,7 @@ def __init__(self, **data): Args: **data: Keyword arguments for model initialization. """ - super().__init__(**data) + super().__init__(**kwargs) self._headers = { "Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}", diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/OpenAIImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/OpenAIImgGenModel.py similarity index 97% rename from pkgs/swarmauri/swarmauri/llms/concrete/OpenAIImgGenModel.py rename to pkgs/swarmauri/swarmauri/image_gens/concrete/OpenAIImgGenModel.py index ad78fd7d8..8862799e5 100644 --- a/pkgs/swarmauri/swarmauri/llms/concrete/OpenAIImgGenModel.py +++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/OpenAIImgGenModel.py @@ -2,11 +2,11 @@ import asyncio import httpx from typing import Dict, List, Literal, Optional +from swarmauri.image_gens.base.ImageGenBase import ImageGenBase from swarmauri.utils.retry_decorator import retry_on_status_codes -from swarmauri.llms.base.LLMBase import LLMBase -class OpenAIImgGenModel(LLMBase): +class OpenAIImgGenModel(ImageGenBase): """ OpenAIImgGenModel is a class for generating images using OpenAI's DALL-E models. @@ -26,14 +26,14 @@ class OpenAIImgGenModel(LLMBase): _BASE_URL: str = PrivateAttr(default="https://api.openai.com/v1/images/generations") _headers: Dict[str, str] = PrivateAttr(default=None) - def __init__(self, **data) -> None: + def __init__(self, **kwargs) -> None: """ Initialize the GroqAIAudio class with the provided data. Args: **data: Arbitrary keyword arguments containing initialization data. 
""" - super().__init__(**data) + super().__init__(**kwargs) self._headers = { "Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json", diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/BlackForestImgGenModel.py b/pkgs/swarmauri/swarmauri/llms/concrete/BlackForestImgGenModel.py deleted file mode 100644 index 50d395394..000000000 --- a/pkgs/swarmauri/swarmauri/llms/concrete/BlackForestImgGenModel.py +++ /dev/null @@ -1,259 +0,0 @@ -import httpx -import time -from typing import List, Literal, Optional, Dict, ClassVar -from pydantic import PrivateAttr -from swarmauri.utils.retry_decorator import retry_on_status_codes -from swarmauri.llms.base.LLMBase import LLMBase -import asyncio -import contextlib - - -class BlackForestImgGenModel(LLMBase): - """ - A model for generating images using FluxPro's image generation models through the Black Forest API. - Link to API key: https://api.bfl.ml/auth/profile - """ - - _BASE_URL: str = PrivateAttr("https://api.bfl.ml") - _client: httpx.Client = PrivateAttr() - _async_client: httpx.AsyncClient = PrivateAttr(default=None) - - api_key: str - allowed_models: List[str] = ["flux-pro-1.1", "flux-pro", "flux-dev"] - - asyncio: ClassVar = asyncio - name: str = "flux-pro" # Default model - type: Literal["BlackForestImgGenModel"] = "BlackForestImgGenModel" - - def __init__(self, **data): - """ - Initializes the BlackForestImgGenModel instance with HTTP clients. - """ - super().__init__(**data) - self._headers = { - "Content-Type": "application/json", - "X-Key": self.api_key, - } - self._client = httpx.Client(headers=self._headers, timeout=30) - - async def _get_async_client(self) -> httpx.AsyncClient: - """Gets or creates an async client instance.""" - if self._async_client is None or self._async_client.is_closed: - self._async_client = httpx.AsyncClient(headers=self._headers, timeout=30) - return self._async_client - - async def _close_async_client(self): - """Closes the async client if it exists and is open.""" - if self._async_client is not None and not self._async_client.is_closed: - await self._async_client.aclose() - self._async_client = None - - @retry_on_status_codes((429, 529), max_retries=1) - def _send_request(self, endpoint: str, data: dict) -> dict: - """Send a synchronous request to FluxPro's API for image generation.""" - url = f"{self._BASE_URL}/{endpoint}" - response = self._client.post(url, json=data) - response.raise_for_status() - return response.json() - - @retry_on_status_codes((429, 529), max_retries=1) - async def _async_send_request(self, endpoint: str, data: dict) -> dict: - """Send an asynchronous request to FluxPro's API for image generation.""" - client = await self._get_async_client() - url = f"{self._BASE_URL}/{endpoint}" - response = await client.post(url, json=data) - response.raise_for_status() - return response.json() - - @retry_on_status_codes((429, 529), max_retries=1) - def _get_result(self, task_id: str) -> dict: - """Get the result of a generation task synchronously.""" - url = f"{self._BASE_URL}/v1/get_result" - params = {"id": task_id} - response = self._client.get(url, params=params) - response.raise_for_status() - return response.json() - - @retry_on_status_codes((429, 529), max_retries=1) - async def _async_get_result(self, task_id: str) -> dict: - """Get the result of a generation task asynchronously.""" - client = await self._get_async_client() - url = f"{self._BASE_URL}/v1/get_result" - params = {"id": task_id} - response = await client.get(url, params=params) - response.raise_for_status() - 
return response.json() - - def generate_image( - self, - prompt: str, - width: int = 1024, - height: int = 768, - steps: Optional[int] = None, - prompt_upsampling: bool = False, - seed: Optional[int] = None, - guidance: Optional[float] = None, - safety_tolerance: Optional[int] = None, - interval: Optional[float] = None, - max_wait_time: int = 300, - check_interval: int = 10, - ) -> Dict: - """ - Generates an image based on the prompt and waits for the result synchronously. - - Args: - prompt (str): The text prompt for image generation - width (int): Image width in pixels - height (int): Image height in pixels - steps (Optional[int]): Number of inference steps - prompt_upsampling (bool): Whether to use prompt upsampling - seed (Optional[int]): Random seed for generation - guidance (Optional[float]): Guidance scale - safety_tolerance (Optional[int]): Safety tolerance level - interval (Optional[float]): Interval parameter (flux-pro only) - max_wait_time (int): Maximum time to wait for result in seconds - check_interval (int): Time between status checks in seconds - - Returns: - Dict: Dictionary containing the image URL and other result information - """ - endpoint = f"v1/{self.name}" - data = { - "prompt": prompt, - "width": width, - "height": height, - "prompt_upsampling": prompt_upsampling, - } - - if steps is not None: - data["steps"] = steps - if seed is not None: - data["seed"] = seed - if guidance is not None: - data["guidance"] = guidance - if safety_tolerance is not None: - data["safety_tolerance"] = safety_tolerance - if interval is not None and self.name == "flux-pro": - data["interval"] = interval - - response = self._send_request(endpoint, data) - task_id = response["id"] - - start_time = time.time() - while time.time() - start_time < max_wait_time: - result = self._get_result(task_id) - if result["status"] == "Ready": - return result["result"]["sample"] - elif result["status"] in [ - "Error", - "Request Moderated", - "Content Moderated", - ]: - raise Exception(f"Task failed with status: {result['status']}") - time.sleep(check_interval) - - raise TimeoutError(f"Image generation timed out after {max_wait_time} seconds") - - async def agenerate_image(self, prompt: str, **kwargs) -> Dict: - """ - Asynchronously generates an image based on the prompt and waits for the result. 
- - Args: - prompt (str): The text prompt for image generation - **kwargs: Additional arguments passed to generate_image - - Returns: - Dict: Dictionary containing the image URL and other result information - """ - try: - endpoint = f"v1/{self.name}" - data = { - "prompt": prompt, - "width": kwargs.get("width", 1024), - "height": kwargs.get("height", 768), - "prompt_upsampling": kwargs.get("prompt_upsampling", False), - } - - optional_params = [ - "steps", - "seed", - "guidance", - "safety_tolerance", - ] - for param in optional_params: - if param in kwargs: - data[param] = kwargs[param] - - if "interval" in kwargs and self.name == "flux-pro": - data["interval"] = kwargs["interval"] - - response = await self._async_send_request(endpoint, data) - task_id = response["id"] - - max_wait_time = kwargs.get("max_wait_time", 300) - check_interval = kwargs.get("check_interval", 10) - start_time = time.time() - - while time.time() - start_time < max_wait_time: - result = await self._async_get_result(task_id) - if result["status"] == "Ready": - return result["result"]["sample"] - elif result["status"] in [ - "Error", - "Request Moderated", - "Content Moderated", - ]: - raise Exception(f"Task failed with status: {result['status']}") - await asyncio.sleep(check_interval) - - raise TimeoutError( - f"Image generation timed out after {max_wait_time} seconds" - ) - finally: - await self._close_async_client() - - def batch_generate(self, prompts: List[str], **kwargs) -> List[Dict]: - """ - Generates images for a batch of prompts synchronously. - - Args: - prompts (List[str]): List of text prompts - **kwargs: Additional arguments passed to generate_image - - Returns: - List[Dict]: List of result dictionaries - """ - return [self.generate_image(prompt=prompt, **kwargs) for prompt in prompts] - - async def abatch_generate( - self, prompts: List[str], max_concurrent: int = 5, **kwargs - ) -> List[Dict]: - """ - Asynchronously generates images for a batch of prompts. 
- - Args: - prompts (List[str]): List of text prompts - max_concurrent (int): Maximum number of concurrent tasks - **kwargs: Additional arguments passed to agenerate_image - - Returns: - List[Dict]: List of result dictionaries - """ - try: - semaphore = asyncio.Semaphore(max_concurrent) - - async def process_prompt(prompt): - async with semaphore: - return await self.agenerate_image(prompt=prompt, **kwargs) - - tasks = [process_prompt(prompt) for prompt in prompts] - return await asyncio.gather(*tasks) - finally: - await self._close_async_client() - - def __del__(self): - """Cleanup method to ensure clients are closed.""" - self._client.close() - if self._async_client is not None and not self._async_client.is_closed: - with contextlib.suppress(Exception): - asyncio.run(self._close_async_client()) diff --git a/pkgs/swarmauri/tests/unit/llms/DeepInfraImgGenModel_unit_test.py b/pkgs/swarmauri/tests/unit/image_gens/DeepInfraImgGenModel_unit_test.py similarity index 97% rename from pkgs/swarmauri/tests/unit/llms/DeepInfraImgGenModel_unit_test.py rename to pkgs/swarmauri/tests/unit/image_gens/DeepInfraImgGenModel_unit_test.py index 98b3b7047..5492ff573 100644 --- a/pkgs/swarmauri/tests/unit/llms/DeepInfraImgGenModel_unit_test.py +++ b/pkgs/swarmauri/tests/unit/image_gens/DeepInfraImgGenModel_unit_test.py @@ -1,6 +1,6 @@ import pytest import os -from swarmauri.llms.concrete.DeepInfraImgGenModel import DeepInfraImgGenModel +from swarmauri.image_gens.concrete.DeepInfraImgGenModel import DeepInfraImgGenModel from dotenv import load_dotenv from swarmauri.utils.timeout_wrapper import timeout diff --git a/pkgs/swarmauri/tests/unit/llms/FalAIImgGenModel_unit_test.py b/pkgs/swarmauri/tests/unit/image_gens/FalAIImgGenModel_unit_test.py similarity index 97% rename from pkgs/swarmauri/tests/unit/llms/FalAIImgGenModel_unit_test.py rename to pkgs/swarmauri/tests/unit/image_gens/FalAIImgGenModel_unit_test.py index bf5b6d83f..858414f7f 100644 --- a/pkgs/swarmauri/tests/unit/llms/FalAIImgGenModel_unit_test.py +++ b/pkgs/swarmauri/tests/unit/image_gens/FalAIImgGenModel_unit_test.py @@ -1,6 +1,6 @@ import pytest import os -from swarmauri.llms.concrete.FalAIImgGenModel import FalAIImgGenModel +from swarmauri.image_gens.concrete.FalAIImgGenModel import FalAIImgGenModel from dotenv import load_dotenv from swarmauri.utils.timeout_wrapper import timeout diff --git a/pkgs/swarmauri/tests/unit/llms/HyperbolicImgGenModel_unit_test.py b/pkgs/swarmauri/tests/unit/image_gens/HyperbolicImgGenModel_unit_test.py similarity index 97% rename from pkgs/swarmauri/tests/unit/llms/HyperbolicImgGenModel_unit_test.py rename to pkgs/swarmauri/tests/unit/image_gens/HyperbolicImgGenModel_unit_test.py index 1ad1b8d16..3772b4dce 100644 --- a/pkgs/swarmauri/tests/unit/llms/HyperbolicImgGenModel_unit_test.py +++ b/pkgs/swarmauri/tests/unit/image_gens/HyperbolicImgGenModel_unit_test.py @@ -1,6 +1,6 @@ import pytest import os -from swarmauri.llms.concrete.HyperbolicImgGenModel import HyperbolicImgGenModel +from swarmauri.image_gens.concrete.HyperbolicImgGenModel import HyperbolicImgGenModel from dotenv import load_dotenv from swarmauri.utils.timeout_wrapper import timeout diff --git a/pkgs/swarmauri/tests/unit/llms/OpenAIImgGenModel_unit_tesst.py b/pkgs/swarmauri/tests/unit/image_gens/OpenAIImgGenModel_unit_tesst.py similarity index 97% rename from pkgs/swarmauri/tests/unit/llms/OpenAIImgGenModel_unit_tesst.py rename to pkgs/swarmauri/tests/unit/image_gens/OpenAIImgGenModel_unit_tesst.py index 7780ba042..b22b9e6ea 100644 --- 
a/pkgs/swarmauri/tests/unit/llms/OpenAIImgGenModel_unit_tesst.py +++ b/pkgs/swarmauri/tests/unit/image_gens/OpenAIImgGenModel_unit_tesst.py @@ -1,7 +1,7 @@ import pytest import os from dotenv import load_dotenv -from swarmauri.llms.concrete.OpenAIImgGenModel import OpenAIImgGenModel +from swarmauri.image_gens.concrete.OpenAIImgGenModel import OpenAIImgGenModel from swarmauri.utils.timeout_wrapper import timeout load_dotenv() diff --git a/pkgs/swarmauri/tests/unit/llms/BlackForestImgGenModel_unit_test.py b/pkgs/swarmauri/tests/unit/llms/BlackForestImgGenModel_unit_test.py deleted file mode 100644 index 706d03b61..000000000 --- a/pkgs/swarmauri/tests/unit/llms/BlackForestImgGenModel_unit_test.py +++ /dev/null @@ -1,120 +0,0 @@ -import pytest -import os -from dotenv import load_dotenv -from swarmauri.llms.concrete.BlackForestImgGenModel import ( - BlackForestImgGenModel, -) - -from swarmauri.utils.timeout_wrapper import timeout - -load_dotenv() - -API_KEY = os.getenv("BLACKFOREST_API_KEY") - - -@pytest.fixture(scope="module") -def blackforest_imggen_model(): - if not API_KEY: - pytest.skip("Skipping due to environment variable not set") - model = BlackForestImgGenModel(api_key=API_KEY) - return model - - -def get_allowed_models(): - if not API_KEY: - return [] - model = BlackForestImgGenModel(api_key=API_KEY) - return model.allowed_models - - -@timeout(5) -@pytest.mark.unit -def test_model_resource(blackforest_imggen_model): - assert blackforest_imggen_model.resource == "LLM" - - -@timeout(5) -@pytest.mark.unit -def test_model_type(blackforest_imggen_model): - assert blackforest_imggen_model.type == "BlackForestImgGenModel" - - -@timeout(5) -@pytest.mark.unit -def test_serialization(blackforest_imggen_model): - assert ( - blackforest_imggen_model.id - == BlackForestImgGenModel.model_validate_json( - blackforest_imggen_model.model_dump_json() - ).id - ) - - -@timeout(5) -@pytest.mark.unit -def test_default_model_name(blackforest_imggen_model): - assert blackforest_imggen_model.name == "flux-pro" - - -@timeout(5) -@pytest.mark.parametrize("model_name", get_allowed_models()) -@pytest.mark.unit -def test_generate_image(blackforest_imggen_model, model_name): - model = blackforest_imggen_model - model.name = model_name - - prompt = "A cute dog playing in a park" - image_url = model.generate_image(prompt=prompt) - - assert isinstance(image_url, str) - assert image_url.startswith("http") - - -@timeout(5) -@pytest.mark.asyncio -@pytest.mark.parametrize("model_name", get_allowed_models()) -@pytest.mark.unit -async def test_agenerate_image(blackforest_imggen_model, model_name): - model = blackforest_imggen_model - model.name = model_name - - prompt = "A mountain with snow and a river" - image_url = await model.agenerate_image(prompt=prompt) - - assert isinstance(image_url, str) - assert image_url.startswith("http") - - -@timeout(5) -@pytest.mark.unit -def test_batch_generate(blackforest_imggen_model): - prompts = [ - "A futuristic city skyline", - "A tropical beach at sunset", - "A cup of coffee on a desk", - ] - - image_urls = blackforest_imggen_model.batch_generate(prompts=prompts) - - assert len(image_urls) == len(prompts) - for url in image_urls: - assert isinstance(url, str) - assert url.startswith("http") - - -@timeout(5) -@pytest.mark.asyncio -@pytest.mark.unit -async def test_abatch_generate(blackforest_imggen_model): - prompts = [ - "A space station in orbit", - "A lion resting in the savannah", - "A rainy day in a city", - ] - - image_urls = await 
blackforest_imggen_model.abatch_generate(prompts=prompts) - - assert len(image_urls) == len(prompts) - for url in image_urls: - assert isinstance(url, str) - assert url.startswith("http") From 6eabe4f43e286200ece84b1a9292df97cdc9f4e0 Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:42:31 +0100 Subject: [PATCH 16/42] swarm - minor fix --- .../swarmauri/image_gens/concrete/BlackForestImgGenModel.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py index 202d5c631..d783c4204 100644 --- a/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py +++ b/pkgs/swarmauri/swarmauri/image_gens/concrete/BlackForestImgGenModel.py @@ -15,8 +15,9 @@ class BlackForestImgGenModel(ImageGenBase): """ _BASE_URL: str = PrivateAttr("https://api.bfl.ml") - _client: httpx.Client = PrivateAttr() + _client: httpx.Client = PrivateAttr(default=None) _async_client: httpx.AsyncClient = PrivateAttr(default=None) + _headers: Dict[str, str] = PrivateAttr(default=None) api_key: str allowed_models: List[str] = ["flux-pro-1.1", "flux-pro", "flux-dev"] From 2f85274e0d6bfbe5feefee4abc0b46b9a6a86613 Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Thu, 21 Nov 2024 11:59:46 +0100 Subject: [PATCH 17/42] comm - Refactor imports in vector store unit tests and add lazy loading utility --- .../document_stores/concrete/__init__.py | 13 +- .../embeddings/concrete/__init__.py | 12 + .../llms/concrete/__init__.py | 14 +- .../measurements/concrete/__init__.py | 17 +- .../parsers/concrete/__init__.py | 19 +- .../retrievers/concrete/__init__.py | 13 +- .../toolkits/concrete/__init__.py | 11 +- .../tools/concrete/__init__.py | 62 +-- .../vector_stores/base/__init__.py | 0 .../{ => concrete}/AnnoyVectorStore.py | 0 .../{ => concrete}/CloudQdrantVectorStore.py | 0 .../CloudWeaviateVectorStore.py | 434 +++++++++--------- .../{ => concrete}/Doc2VecVectorStore.py | 0 .../{ => concrete}/DuckDBVectorStore.py | 0 .../{ => concrete}/MlmVectorStore.py | 0 .../{ => concrete}/Neo4jVectorStore.py | 0 .../PersistentChromaDBVectorStore.py | 0 .../PersistentQdrantVectorStore.py | 0 .../{ => concrete}/PineconeVectorStore.py | 0 .../{ => concrete}/RedisVectorStore.py | 0 .../vector_stores/concrete/__init__.py | 20 + .../vector_stores/AnnoyVectorStore_test.py | 2 +- .../CloudQdrantVectorStore_test.py | 2 +- .../CloudWeaviateVectorStore_test.py | 2 +- .../Doc2VecVectorStore_unit_test.py | 2 +- .../DuckDBVectorStore_unit_test.py | 2 +- .../vector_stores/MlmVectorStore_unit_test.py | 2 +- .../vector_stores/Neo4jVectorStore_test.py | 2 +- .../PersistentChromadbVectorStore_test.py | 2 +- .../PersistentQdrantVectorStore_test.py | 2 +- .../vector_stores/PineconeVectorStore_test.py | 2 +- .../vector_stores/RedisVectorStore_test.py | 4 +- .../swarmauri/swarmauri/utils/_lazy_import.py | 22 + 33 files changed, 381 insertions(+), 280 deletions(-) create mode 100644 pkgs/community/swarmauri_community/vector_stores/base/__init__.py rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/AnnoyVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/CloudQdrantVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/CloudWeaviateVectorStore.py (97%) rename 
pkgs/community/swarmauri_community/vector_stores/{ => concrete}/Doc2VecVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/DuckDBVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/MlmVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/Neo4jVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/PersistentChromaDBVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/PersistentQdrantVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/PineconeVectorStore.py (100%) rename pkgs/community/swarmauri_community/vector_stores/{ => concrete}/RedisVectorStore.py (100%) create mode 100644 pkgs/community/swarmauri_community/vector_stores/concrete/__init__.py create mode 100644 pkgs/swarmauri/swarmauri/utils/_lazy_import.py diff --git a/pkgs/community/swarmauri_community/document_stores/concrete/__init__.py b/pkgs/community/swarmauri_community/document_stores/concrete/__init__.py index a44ceb5c2..941fc5618 100644 --- a/pkgs/community/swarmauri_community/document_stores/concrete/__init__.py +++ b/pkgs/community/swarmauri_community/document_stores/concrete/__init__.py @@ -1,3 +1,10 @@ -from swarmauri_community.document_stores.concrete.RedisDocumentStore import ( - RedisDocumentStore, -) +from swarmauri.utils._lazy_import import _lazy_import + +documents_stores_files = [ + ("swarmauri_community.documents_stores.concrete.RedisDocumentStore", "RedisDocumentStore"), +] + +for module_name, class_name in documents_stores_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +__all__ = [class_name for _, class_name in documents_stores_files] diff --git a/pkgs/community/swarmauri_community/embeddings/concrete/__init__.py b/pkgs/community/swarmauri_community/embeddings/concrete/__init__.py index e69de29bb..7bcc482d7 100644 --- a/pkgs/community/swarmauri_community/embeddings/concrete/__init__.py +++ b/pkgs/community/swarmauri_community/embeddings/concrete/__init__.py @@ -0,0 +1,12 @@ +from swarmauri.utils._lazy_import import _lazy_import + + +embeddings_files = [ + ("swarmauri_community.embeddings.concrete.Doc2VecEmbedding", "Doc2VecEmbedding"), + ("swarmauri_community.embeddings.concrete.MlmEmbedding", "MlmEmbedding"), +] + +for module_name, class_name in embeddings_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +__all__ = [class_name for _, class_name in embeddings_files] diff --git a/pkgs/community/swarmauri_community/llms/concrete/__init__.py b/pkgs/community/swarmauri_community/llms/concrete/__init__.py index a8fa703c0..5c2266ce5 100644 --- a/pkgs/community/swarmauri_community/llms/concrete/__init__.py +++ b/pkgs/community/swarmauri_community/llms/concrete/__init__.py @@ -1,4 +1,12 @@ -from swarmauri_community.llms.concrete.LeptonAIImgGenModel import LeptonAIImgGenModel -from swarmauri_community.llms.concrete.LeptonAIModel import LeptonAIModel +from swarmauri.utils._lazy_import import _lazy_import -__all__ = ["LeptonAIImgGenModel", "LeptonAIModel"] +llms_files = [ + ("swarmauri_community.llms.concrete.LeptonAIImgGenModel", "LeptonAIImgGenModel"), + ("swarmauri_community.llms.concrete.LeptonAIModel", "LeptonAIModel"), + ("swarmauri_community.llms.concrete.PytesseractImg2TextModel", "PytesseractImg2TextModel"), +] + +for module_name, class_name in llms_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +__all__ 
diff --git a/pkgs/community/swarmauri_community/measurements/concrete/__init__.py b/pkgs/community/swarmauri_community/measurements/concrete/__init__.py
index 276716315..a748c6a0e 100644
--- a/pkgs/community/swarmauri_community/measurements/concrete/__init__.py
+++ b/pkgs/community/swarmauri_community/measurements/concrete/__init__.py
@@ -1,6 +1,11 @@
-from swarmauri_community.measurements.concrete.MutualInformationMeasurement import (
-    MutualInformationMeasurement,
-)
-from swarmauri_community.measurements.concrete.TokenCountEstimatorMeasurement import (
-    TokenCountEstimatorMeasurement,
-)
+from swarmauri.utils._lazy_import import _lazy_import
+
+measurement_files = [
+    ("swarmauri_community.measurements.concrete.MutualInformationMeasurement", "MutualInformationMeasurement"),
+    ("swarmauri_community.measurements.concrete.TokenCountEstimatorMeasurement", "TokenCountEstimatorMeasurement"),
+]
+
+for module_name, class_name in measurement_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+__all__ = [class_name for _, class_name in measurement_files]
diff --git a/pkgs/community/swarmauri_community/parsers/concrete/__init__.py b/pkgs/community/swarmauri_community/parsers/concrete/__init__.py
index b5d547c4e..3808b4733 100644
--- a/pkgs/community/swarmauri_community/parsers/concrete/__init__.py
+++ b/pkgs/community/swarmauri_community/parsers/concrete/__init__.py
@@ -1,3 +1,16 @@
-from swarmauri_community.parsers.concrete.FitzPdfParser import PDFtoTextParser
-from swarmauri_community.parsers.concrete.PyPDF2Parser import PyPDF2Parser
-from swarmauri_community.parsers.concrete.PyPDFTKParser import PyPDFTKParser
+from swarmauri.utils._lazy_import import _lazy_import
+
+parsers_files = [
+    ("swarmauri_community.parsers.concrete.BERTEmbeddingParser", "BERTEmbeddingParser"),
+    ("swarmauri_community.parsers.concrete.EntityRecognitionParser", "EntityRecognitionParser"),
+    ("swarmauri_community.parsers.concrete.FitzPdfParser", "FitzPdfParser"),
+    ("swarmauri_community.parsers.concrete.PyPDF2Parser", "PyPDF2Parser"),
+    ("swarmauri_community.parsers.concrete.PyPDFTKParser", "PyPDFTKParser"),
+    ("swarmauri_community.parsers.concrete.TextBlobNounParser", "TextBlobNounParser"),
+    ("swarmauri_community.parsers.concrete.TextBlobSentenceParser", "TextBlobSentenceParser"),
+]
+
+for module_name, class_name in parsers_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+__all__ = [class_name for _, class_name in parsers_files]
diff --git a/pkgs/community/swarmauri_community/retrievers/concrete/__init__.py b/pkgs/community/swarmauri_community/retrievers/concrete/__init__.py
index 000e57ffe..ec089ab66 100644
--- a/pkgs/community/swarmauri_community/retrievers/concrete/__init__.py
+++ b/pkgs/community/swarmauri_community/retrievers/concrete/__init__.py
@@ -1,5 +1,10 @@
-# -*- coding: utf-8 -*-
+from swarmauri.utils._lazy_import import _lazy_import
 
-from swarmauri_community.retrievers.concrete.RedisDocumentRetriever import (
-    RedisDocumentRetriever,
-)
+retriever_files = [
+    ("swarmauri_community.retrievers.concrete.RedisDocumentRetriever", "RedisDocumentRetriever"),
+]
+
+for module_name, class_name in retriever_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+__all__ = [class_name for _, class_name in retriever_files]
diff --git a/pkgs/community/swarmauri_community/toolkits/concrete/__init__.py b/pkgs/community/swarmauri_community/toolkits/concrete/__init__.py
index 129ad1dc9..6aca27ddc 100644
---
a/pkgs/community/swarmauri_community/toolkits/concrete/__init__.py +++ b/pkgs/community/swarmauri_community/toolkits/concrete/__init__.py @@ -1 +1,10 @@ -from swarmauri_community.toolkits.concrete.GithubToolkit import * +from swarmauri.utils._lazy_import import _lazy_import + +toolkits_files = [ + ("swarmauri_community.toolkits.concrete.GithubToolkit", "GithubToolkit"), +] + +for module_name, class_name in toolkits_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +__all__ = [class_name for _, class_name in toolkits_files] diff --git a/pkgs/community/swarmauri_community/tools/concrete/__init__.py b/pkgs/community/swarmauri_community/tools/concrete/__init__.py index 9db51c17f..fe50a5544 100644 --- a/pkgs/community/swarmauri_community/tools/concrete/__init__.py +++ b/pkgs/community/swarmauri_community/tools/concrete/__init__.py @@ -1,31 +1,33 @@ -from swarmauri_community.tools.concrete.CaptchaGeneratorTool import CaptchaGeneratorTool -from swarmauri_community.tools.concrete.DaleChallReadabilityTool import ( - DaleChallReadabilityTool, -) -from swarmauri_community.tools.concrete.DownloadPdfTool import DownloadPDFTool -from swarmauri_community.tools.concrete.EntityRecognitionTool import ( - EntityRecognitionTool, -) -from swarmauri_community.tools.concrete.FoliumTool import FoliumTool -from swarmauri_community.tools.concrete.GithubBranchTool import GithubBranchTool -from swarmauri_community.tools.concrete.GithubCommitTool import GithubCommitTool -from swarmauri_community.tools.concrete.GithubIssueTool import GithubIssueTool -from swarmauri_community.tools.concrete.GithubPRTool import GithubPRTool -from swarmauri_community.tools.concrete.GithubRepoTool import GithubRepoTool -from swarmauri_community.tools.concrete.GithubTool import GithubTool -from swarmauri_community.tools.concrete.GmailReadTool import GmailReadTool -from swarmauri_community.tools.concrete.GmailSendTool import GmailSendTool -from swarmauri_community.tools.concrete.LexicalDensityTool import LexicalDensityTool -from swarmauri_community.tools.concrete.PsutilTool import PsutilTool -from swarmauri_community.tools.concrete.QrCodeGeneratorTool import QrCodeGeneratorTool -from swarmauri_community.tools.concrete.SentenceComplexityTool import ( - SentenceComplexityTool, -) -from swarmauri_community.tools.concrete.SentimentAnalysisTool import ( - SentimentAnalysisTool, -) -from swarmauri_community.tools.concrete.SMOGIndexTool import SMOGIndexTool -from swarmauri_community.tools.concrete.WebScrapingTool import WebScrapingTool -from swarmauri_community.tools.concrete.ZapierHookTool import ZapierHookTool +from swarmauri.utils._lazy_import import _lazy_import -# from swarmauri_community.tools.concrete.PaCMAPTool import PaCMAPTool +tool_files = [ + ("swarmauri_community.tools.concrete.CaptchaGeneratorTool", "CaptchaGeneratorTool"), + ("swarmauri_community.tools.concrete.DaleChallReadabilityTool", "DaleChallReadabilityTool"), + ("swarmauri_community.tools.concrete.DownloadPdfTool", "DownloadPDFTool"), + ("swarmauri_community.tools.concrete.EntityRecognitionTool", "EntityRecognitionTool"), + ("swarmauri_community.tools.concrete.FoliumTool", "FoliumTool"), + ("swarmauri_community.tools.concrete.GithubBranchTool", "GithubBranchTool"), + ("swarmauri_community.tools.concrete.GithubCommitTool", "GithubCommitTool"), + ("swarmauri_community.tools.concrete.GithubIssueTool", "GithubIssueTool"), + ("swarmauri_community.tools.concrete.GithubPRTool", "GithubPRTool"), + ("swarmauri_community.tools.concrete.GithubRepoTool", 
"GithubRepoTool"), + ("swarmauri_community.tools.concrete.GithubTool", "GithubTool"), + ("swarmauri_community.tools.concrete.GmailReadTool", "GmailReadTool"), + ("swarmauri_community.tools.concrete.GmailSendTool", "GmailSendTool"), + ("swarmauri_community.tools.concrete.LexicalDensityTool", "LexicalDensityTool"), + ("swarmauri_community.tools.concrete.PsutilTool", "PsutilTool"), + ("swarmauri_community.tools.concrete.QrCodeGeneratorTool", "QrCodeGeneratorTool"), + ("swarmauri_community.tools.concrete.SentenceComplexityTool", "SentenceComplexityTool"), + ("swarmauri_community.tools.concrete.SentimentAnalysisTool", "SentimentAnalysisTool"), + ("swarmauri_community.tools.concrete.SMOGIndexTool", "SMOGIndexTool"), + ("swarmauri_community.tools.concrete.WebScrapingTool", "WebScrapingTool"), + ("swarmauri_community.tools.concrete.ZapierHookTool", "ZapierHookTool"), + # ("swarmauri_community.tools.concrete.PaCMAPTool", "PaCMAPTool"), +] + +# Lazy loading of tools, storing them in variables +for module_name, class_name in tool_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded tools to __all__ +__all__ = [class_name for _, class_name in tool_files] diff --git a/pkgs/community/swarmauri_community/vector_stores/base/__init__.py b/pkgs/community/swarmauri_community/vector_stores/base/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/community/swarmauri_community/vector_stores/AnnoyVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/AnnoyVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/AnnoyVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/AnnoyVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/CloudQdrantVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/CloudQdrantVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/CloudQdrantVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/CloudQdrantVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/CloudWeaviateVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/CloudWeaviateVectorStore.py similarity index 97% rename from pkgs/community/swarmauri_community/vector_stores/CloudWeaviateVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/CloudWeaviateVectorStore.py index 9e55fc0ca..0e1ffced7 100644 --- a/pkgs/community/swarmauri_community/vector_stores/CloudWeaviateVectorStore.py +++ b/pkgs/community/swarmauri_community/vector_stores/concrete/CloudWeaviateVectorStore.py @@ -1,218 +1,218 @@ -from typing import List, Union, Literal, Optional -from pydantic import BaseModel, PrivateAttr -import uuid as ud -import weaviate -from weaviate.classes.init import Auth -from weaviate.util import generate_uuid5 -from weaviate.classes.query import MetadataQuery - -from swarmauri.documents.concrete.Document import Document -from swarmauri.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding -from swarmauri.vectors.concrete.Vector import Vector - -from swarmauri.vector_stores.base.VectorStoreBase import VectorStoreBase -from swarmauri.vector_stores.base.VectorStoreRetrieveMixin import VectorStoreRetrieveMixin -from swarmauri.vector_stores.base.VectorStoreSaveLoadMixin import VectorStoreSaveLoadMixin -from swarmauri.vector_stores.base.VectorStoreCloudMixin import VectorStoreCloudMixin - - 
-class CloudWeaviateVectorStore(VectorStoreSaveLoadMixin, VectorStoreRetrieveMixin, VectorStoreBase, VectorStoreCloudMixin): - type: Literal["CloudWeaviateVectorStore"] = "CloudWeaviateVectorStore" - - - # Private attributes - _client: Optional[weaviate.Client] = PrivateAttr(default=None) - _embedder: Doc2VecEmbedding = PrivateAttr(default=None) - _namespace_uuid: ud.UUID = PrivateAttr(default_factory=ud.uuid4) - - def __init__(self, **data): - super().__init__(**data) - - # Initialize the vectorizer and Weaviate client - self._embedder = Doc2VecEmbedding(vector_size=self.vector_size) - # self._initialize_client() - - def connect(self, **kwargs): - """ - Initialize the Weaviate client. - """ - if self._client is None: - self._client = weaviate.connect_to_weaviate_cloud( - cluster_url=self.url, - auth_credentials=Auth.api_key(self.api_key), - headers=kwargs.get("headers", {}) - ) - - def disconnect(self) -> None: - """ - Disconnects from the Qdrant cloud vector store. - """ - if self.client is not None: - self.client = None - - def add_document(self, document: Document) -> None: - """ - Add a single document to the vector store. - - :param document: Document to add - """ - try: - collection = self._client.collections.get(self.collection_name) - - # Generate or use existing embedding - embedding = document.embedding or self._embedder.fit_transform([document.content])[0] - - data_object = { - "content": document.content, - "metadata": document.metadata, - } - - # Generate UUID for document - uuid = ( - str(ud.uuid5(self._namespace_uuid, document.id)) - if document.id - else generate_uuid5(data_object) - ) - - collection.data.insert( - properties=data_object, - vector=embedding.value, - uuid=uuid, - ) - - print(f"Document '{document.id}' added to Weaviate.") - except Exception as e: - print(f"Error adding document '{document.id}': {e}") - raise - - def add_documents(self, documents: List[Document]) -> None: - """ - Add multiple documents to the vector store. - - :param documents: List of documents to add - """ - try: - for document in documents: - self.add_document(document) - - print(f"{len(documents)} documents added to Weaviate.") - except Exception as e: - print(f"Error adding documents: {e}") - raise - - def get_document(self, id: str) -> Union[Document, None]: - """ - Retrieve a document by its ID. - - :param id: Document ID - :return: Document object or None if not found - """ - try: - collection = self._client.collections.get(self.collection_name) - - result = collection.query.fetch_object_by_id(ud.uuid5(self._namespace_uuid, id)) - - if result: - return Document( - id=id, - content=result.properties["content"], - metadata=result.properties["metadata"], - ) - return None - except Exception as e: - print(f"Error retrieving document '{id}': {e}") - return None - - def get_all_documents(self) -> List[Document]: - """ - Retrieve all documents from the vector store. - - :return: List of Document objects - """ - try: - collection = self._client.collections.get(self.collection_name) - # return collection - documents = [ - Document( - content=item.properties["content"], - metadata=item.properties["metadata"], - embedding=Vector(value=list(item.vector.values())[0]), - ) - for item in collection.iterator(include_vector=True) - ] - return documents - except Exception as e: - print(f"Error retrieving all documents: {e}") - return [] - - def delete_document(self, id: str) -> None: - """ - Delete a document by its ID. 
- - :param id: Document ID - """ - try: - collection = self._client.collections.get(self.collection_name) - collection.data.delete_by_id(ud.uuid5(self._namespace_uuid, id)) - print(f"Document '{id}' has been deleted from Weaviate.") - except Exception as e: - print(f"Error deleting document '{id}': {e}") - raise - - def update_document(self, id: str, document: Document) -> None: - """ - Update an existing document. - - :param id: Document ID - :param updated_document: Document object with updated data - """ - self.delete_document(id) - self.add_document(document) - - def retrieve(self, query: str, top_k: int = 5) -> List[Document]: - """ - Retrieve the top_k most relevant documents based on the given query. - - :param query: Query string - :param top_k: Number of top similar documents to retrieve - :return: List of Document objects - """ - try: - collection = self._client.collections.get(self.collection_name) - query_vector = self._embedder.infer_vector(query) - response = collection.query.near_vector( - near_vector=query_vector.value, - limit=top_k, - return_metadata=MetadataQuery(distance=True), - ) - - documents = [ - Document( - # id=res.id, - content=res.properties["content"], - metadata=res.properties["metadata"], - ) - for res in response.objects - ] - return documents - except Exception as e: - print(f"Error retrieving documents for query '{query}': {e}") - return [] - - def close(self): - """ - Close the connection to the Weaviate server. - """ - if self._client: - self._client.close() - - def model_dump_json(self, *args, **kwargs) -> str: - # Call the disconnect method before serialization - self.disconnect() - - # Now proceed with the usual JSON serialization - return super().model_dump_json(*args, **kwargs) - - - def __del__(self): +from typing import List, Union, Literal, Optional +from pydantic import BaseModel, PrivateAttr +import uuid as ud +import weaviate +from weaviate.classes.init import Auth +from weaviate.util import generate_uuid5 +from weaviate.classes.query import MetadataQuery + +from swarmauri.documents.concrete.Document import Document +from swarmauri.embeddings.concrete.Doc2VecEmbedding import Doc2VecEmbedding +from swarmauri.vectors.concrete.Vector import Vector + +from swarmauri.vector_stores.base.VectorStoreBase import VectorStoreBase +from swarmauri.vector_stores.base.VectorStoreRetrieveMixin import VectorStoreRetrieveMixin +from swarmauri.vector_stores.base.VectorStoreSaveLoadMixin import VectorStoreSaveLoadMixin +from swarmauri.vector_stores.base.VectorStoreCloudMixin import VectorStoreCloudMixin + + +class CloudWeaviateVectorStore(VectorStoreSaveLoadMixin, VectorStoreRetrieveMixin, VectorStoreBase, VectorStoreCloudMixin): + type: Literal["CloudWeaviateVectorStore"] = "CloudWeaviateVectorStore" + + + # Private attributes + _client: Optional[weaviate.Client] = PrivateAttr(default=None) + _embedder: Doc2VecEmbedding = PrivateAttr(default=None) + _namespace_uuid: ud.UUID = PrivateAttr(default_factory=ud.uuid4) + + def __init__(self, **data): + super().__init__(**data) + + # Initialize the vectorizer and Weaviate client + self._embedder = Doc2VecEmbedding(vector_size=self.vector_size) + # self._initialize_client() + + def connect(self, **kwargs): + """ + Initialize the Weaviate client. 
+ """ + if self._client is None: + self._client = weaviate.connect_to_weaviate_cloud( + cluster_url=self.url, + auth_credentials=Auth.api_key(self.api_key), + headers=kwargs.get("headers", {}) + ) + + def disconnect(self) -> None: + """ + Disconnects from the Qdrant cloud vector store. + """ + if self.client is not None: + self.client = None + + def add_document(self, document: Document) -> None: + """ + Add a single document to the vector store. + + :param document: Document to add + """ + try: + collection = self._client.collections.get(self.collection_name) + + # Generate or use existing embedding + embedding = document.embedding or self._embedder.fit_transform([document.content])[0] + + data_object = { + "content": document.content, + "metadata": document.metadata, + } + + # Generate UUID for document + uuid = ( + str(ud.uuid5(self._namespace_uuid, document.id)) + if document.id + else generate_uuid5(data_object) + ) + + collection.data.insert( + properties=data_object, + vector=embedding.value, + uuid=uuid, + ) + + print(f"Document '{document.id}' added to Weaviate.") + except Exception as e: + print(f"Error adding document '{document.id}': {e}") + raise + + def add_documents(self, documents: List[Document]) -> None: + """ + Add multiple documents to the vector store. + + :param documents: List of documents to add + """ + try: + for document in documents: + self.add_document(document) + + print(f"{len(documents)} documents added to Weaviate.") + except Exception as e: + print(f"Error adding documents: {e}") + raise + + def get_document(self, id: str) -> Union[Document, None]: + """ + Retrieve a document by its ID. + + :param id: Document ID + :return: Document object or None if not found + """ + try: + collection = self._client.collections.get(self.collection_name) + + result = collection.query.fetch_object_by_id(ud.uuid5(self._namespace_uuid, id)) + + if result: + return Document( + id=id, + content=result.properties["content"], + metadata=result.properties["metadata"], + ) + return None + except Exception as e: + print(f"Error retrieving document '{id}': {e}") + return None + + def get_all_documents(self) -> List[Document]: + """ + Retrieve all documents from the vector store. + + :return: List of Document objects + """ + try: + collection = self._client.collections.get(self.collection_name) + # return collection + documents = [ + Document( + content=item.properties["content"], + metadata=item.properties["metadata"], + embedding=Vector(value=list(item.vector.values())[0]), + ) + for item in collection.iterator(include_vector=True) + ] + return documents + except Exception as e: + print(f"Error retrieving all documents: {e}") + return [] + + def delete_document(self, id: str) -> None: + """ + Delete a document by its ID. + + :param id: Document ID + """ + try: + collection = self._client.collections.get(self.collection_name) + collection.data.delete_by_id(ud.uuid5(self._namespace_uuid, id)) + print(f"Document '{id}' has been deleted from Weaviate.") + except Exception as e: + print(f"Error deleting document '{id}': {e}") + raise + + def update_document(self, id: str, document: Document) -> None: + """ + Update an existing document. + + :param id: Document ID + :param updated_document: Document object with updated data + """ + self.delete_document(id) + self.add_document(document) + + def retrieve(self, query: str, top_k: int = 5) -> List[Document]: + """ + Retrieve the top_k most relevant documents based on the given query. 
+ + :param query: Query string + :param top_k: Number of top similar documents to retrieve + :return: List of Document objects + """ + try: + collection = self._client.collections.get(self.collection_name) + query_vector = self._embedder.infer_vector(query) + response = collection.query.near_vector( + near_vector=query_vector.value, + limit=top_k, + return_metadata=MetadataQuery(distance=True), + ) + + documents = [ + Document( + # id=res.id, + content=res.properties["content"], + metadata=res.properties["metadata"], + ) + for res in response.objects + ] + return documents + except Exception as e: + print(f"Error retrieving documents for query '{query}': {e}") + return [] + + def close(self): + """ + Close the connection to the Weaviate server. + """ + if self._client: + self._client.close() + + def model_dump_json(self, *args, **kwargs) -> str: + # Call the disconnect method before serialization + self.disconnect() + + # Now proceed with the usual JSON serialization + return super().model_dump_json(*args, **kwargs) + + + def __del__(self): self.close() \ No newline at end of file diff --git a/pkgs/community/swarmauri_community/vector_stores/Doc2VecVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/Doc2VecVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/Doc2VecVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/Doc2VecVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/DuckDBVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/DuckDBVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/DuckDBVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/DuckDBVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/MlmVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/MlmVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/MlmVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/Neo4jVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/Neo4jVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/Neo4jVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/Neo4jVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/PersistentChromaDBVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/PersistentChromaDBVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/PersistentChromaDBVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/PersistentChromaDBVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/PersistentQdrantVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/PersistentQdrantVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/PersistentQdrantVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/PersistentQdrantVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/PineconeVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/PineconeVectorStore.py similarity index 100% rename from 
pkgs/community/swarmauri_community/vector_stores/PineconeVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/PineconeVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/RedisVectorStore.py b/pkgs/community/swarmauri_community/vector_stores/concrete/RedisVectorStore.py similarity index 100% rename from pkgs/community/swarmauri_community/vector_stores/RedisVectorStore.py rename to pkgs/community/swarmauri_community/vector_stores/concrete/RedisVectorStore.py diff --git a/pkgs/community/swarmauri_community/vector_stores/concrete/__init__.py b/pkgs/community/swarmauri_community/vector_stores/concrete/__init__.py new file mode 100644 index 000000000..f920d22b4 --- /dev/null +++ b/pkgs/community/swarmauri_community/vector_stores/concrete/__init__.py @@ -0,0 +1,20 @@ +from swarmauri.utils._lazy_import import _lazy_import + +vector_store_files = [ + ("swarmauri_community.vector_stores.concrete.AnnoyVectorStore", "AnnoyVectorStore"), + ("swarmauri_community.vector_stores.concrete.CloudQdrantVectorStore", "CloudQdrantVectorStore"), + ("swarmauri_community.vector_stores.concrete.CloudWeaviateVectorStore", "CloudWeaviateVectorStore"), + ("swarmauri_community.vector_stores.concrete.Doc2VecVectorStore", "Doc2VecVectorStore"), + ("swarmauri_community.vector_stores.concrete.DuckDBVectorStore", "DuckDBVectorStore"), + ("swarmauri_community.vector_stores.concrete.MlmVectorStore", "MlmVectorStore"), + ("swarmauri_community.vector_stores.concrete.Neo4jVectorStore", "Neo4jVectorStore"), + ("swarmauri_community.vector_stores.concrete.PersistentChromaDBVectorStore", "PersistentChromaDBVectorStore"), + ("swarmauri_community.vector_stores.concrete.PersistentQdrantVectorStore", "PersistentQdrantVectorStore"), + ("swarmauri_community.vector_stores.concrete.PineconeVectorStore", "PineconeVectorStore"), + ("swarmauri_community.vector_stores.concrete.RedisVectorStore", "RedisVectorStore"), +] + +for module_name, class_name in vector_store_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +__all__ = [class_name for _, class_name in vector_store_files] diff --git a/pkgs/community/tests/unit/vector_stores/AnnoyVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/AnnoyVectorStore_test.py index 9f2ce8320..cee7afddd 100644 --- a/pkgs/community/tests/unit/vector_stores/AnnoyVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/AnnoyVectorStore_test.py @@ -1,6 +1,6 @@ import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.AnnoyVectorStore import AnnoyVectorStore +from swarmauri_community.vector_stores.concrete.AnnoyVectorStore import AnnoyVectorStore # Fixture for creating an AnnoyVectorStore instance diff --git a/pkgs/community/tests/unit/vector_stores/CloudQdrantVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/CloudQdrantVectorStore_test.py index 2b9028c72..26ee25841 100644 --- a/pkgs/community/tests/unit/vector_stores/CloudQdrantVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/CloudQdrantVectorStore_test.py @@ -1,7 +1,7 @@ import os import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.CloudQdrantVectorStore import ( +from swarmauri_community.vector_stores.concrete.CloudQdrantVectorStore import ( CloudQdrantVectorStore, ) diff --git a/pkgs/community/tests/unit/vector_stores/CloudWeaviateVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/CloudWeaviateVectorStore_test.py 
index 051d6aa6b..9bad53191 100644 --- a/pkgs/community/tests/unit/vector_stores/CloudWeaviateVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/CloudWeaviateVectorStore_test.py @@ -1,7 +1,7 @@ import os import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.CloudWeaviateVectorStore import ( +from swarmauri_community.vector_stores.concrete.CloudWeaviateVectorStore import ( CloudWeaviateVectorStore, ) from dotenv import load_dotenv diff --git a/pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py b/pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py index 01c8eb634..497e8a45f 100644 --- a/pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py +++ b/pkgs/community/tests/unit/vector_stores/Doc2VecVectorStore_unit_test.py @@ -1,6 +1,6 @@ import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.Doc2VecVectorStore import Doc2VecVectorStore +from swarmauri_community.vector_stores.concrete.Doc2VecVectorStore import Doc2VecVectorStore @pytest.mark.unit diff --git a/pkgs/community/tests/unit/vector_stores/DuckDBVectorStore_unit_test.py b/pkgs/community/tests/unit/vector_stores/DuckDBVectorStore_unit_test.py index 28bd33080..0b247ccd8 100644 --- a/pkgs/community/tests/unit/vector_stores/DuckDBVectorStore_unit_test.py +++ b/pkgs/community/tests/unit/vector_stores/DuckDBVectorStore_unit_test.py @@ -2,7 +2,7 @@ import os import json from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.DuckDBVectorStore import DuckDBVectorStore +from swarmauri_community.vector_stores.concrete.DuckDBVectorStore import DuckDBVectorStore @pytest.fixture(params=[":memory:", "test_db.db"]) diff --git a/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py b/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py index 5f486aebb..1c3dc9273 100644 --- a/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py +++ b/pkgs/community/tests/unit/vector_stores/MlmVectorStore_unit_test.py @@ -1,6 +1,6 @@ import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.MlmVectorStore import MlmVectorStore +from swarmauri_community.vector_stores.concrete.MlmVectorStore import MlmVectorStore @pytest.mark.unit diff --git a/pkgs/community/tests/unit/vector_stores/Neo4jVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/Neo4jVectorStore_test.py index 74de4851b..d5e4699f6 100644 --- a/pkgs/community/tests/unit/vector_stores/Neo4jVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/Neo4jVectorStore_test.py @@ -2,7 +2,7 @@ import pytest from dotenv import load_dotenv from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.Neo4jVectorStore import Neo4jVectorStore +from swarmauri_community.vector_stores.concrete.Neo4jVectorStore import Neo4jVectorStore # Load environment variables load_dotenv() diff --git a/pkgs/community/tests/unit/vector_stores/PersistentChromadbVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/PersistentChromadbVectorStore_test.py index b973277f1..66e0f0ed3 100644 --- a/pkgs/community/tests/unit/vector_stores/PersistentChromadbVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/PersistentChromadbVectorStore_test.py @@ -1,7 +1,7 @@ import os import pytest from swarmauri.documents.concrete.Document import Document -from 
swarmauri_community.vector_stores.PersistentChromaDBVectorStore import ( +from swarmauri_community.vector_stores.concrete.PersistentChromaDBVectorStore import ( PersistentChromaDBVectorStore, ) diff --git a/pkgs/community/tests/unit/vector_stores/PersistentQdrantVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/PersistentQdrantVectorStore_test.py index 2277a87a4..d58c4295f 100644 --- a/pkgs/community/tests/unit/vector_stores/PersistentQdrantVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/PersistentQdrantVectorStore_test.py @@ -1,7 +1,7 @@ import os import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.PersistentQdrantVectorStore import ( +from swarmauri_community.vector_stores.concrete.PersistentQdrantVectorStore import ( PersistentQdrantVectorStore, ) diff --git a/pkgs/community/tests/unit/vector_stores/PineconeVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/PineconeVectorStore_test.py index de2749df0..803a3f61c 100644 --- a/pkgs/community/tests/unit/vector_stores/PineconeVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/PineconeVectorStore_test.py @@ -1,7 +1,7 @@ import os import pytest from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.PineconeVectorStore import PineconeVectorStore +from swarmauri_community.vector_stores.concrete.PineconeVectorStore import PineconeVectorStore from dotenv import load_dotenv load_dotenv() diff --git a/pkgs/community/tests/unit/vector_stores/RedisVectorStore_test.py b/pkgs/community/tests/unit/vector_stores/RedisVectorStore_test.py index 17cbce8a2..80fc1a2d5 100644 --- a/pkgs/community/tests/unit/vector_stores/RedisVectorStore_test.py +++ b/pkgs/community/tests/unit/vector_stores/RedisVectorStore_test.py @@ -1,8 +1,6 @@ import pytest -import numpy as np -from swarmauri.documents.concrete.Document import Document -from swarmauri_community.vector_stores.RedisVectorStore import RedisVectorStore from swarmauri.documents.concrete.Document import Document +from swarmauri_community.vector_stores.concrete.RedisVectorStore import RedisVectorStore from dotenv import load_dotenv from os import getenv diff --git a/pkgs/swarmauri/swarmauri/utils/_lazy_import.py b/pkgs/swarmauri/swarmauri/utils/_lazy_import.py new file mode 100644 index 000000000..a3d3bd34a --- /dev/null +++ b/pkgs/swarmauri/swarmauri/utils/_lazy_import.py @@ -0,0 +1,22 @@ +import importlib + + +# Define a lazy loader function with a warning message if the module or class is not found +def _lazy_import(module_name, class_name): + try: + # Import the module + module = importlib.import_module(module_name) + # Dynamically get the class from the module + return getattr(module, class_name) + except ImportError: + # If module is not available, print a warning message + print( + f"Warning: The module '{module_name}' is not available. " + f"Please install the necessary dependencies to enable this functionality." + ) + return None + except AttributeError: + print( + f"Warning: The class '{class_name}' was not found in module '{module_name}'." 
+        )
+        return None

From 58b6ad1fc4b6d685ba11a49b36eea1bfeac18939 Mon Sep 17 00:00:00 2001
From: cobycloud <25079070+cobycloud@users.noreply.github.com>
Date: Thu, 21 Nov 2024 05:28:07 -0600
Subject: [PATCH 18/42] swarm - Update pyproject.toml

---
 pkgs/swarmauri/pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml
index 04eb35465..58608f84c 100644
--- a/pkgs/swarmauri/pyproject.toml
+++ b/pkgs/swarmauri/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "swarmauri"
-version = "0.5.2"
+version = "0.5.3.dev1"
 description = "This repository includes base classes, concrete generics, and concrete standard components within the Swarmauri framework."
 authors = ["Jacob Stewart "]
 license = "Apache-2.0"

From 9e5376a8985b372bccf1bf0eeaff47ebeb4aba13 Mon Sep 17 00:00:00 2001
From: 3rdSon
Date: Thu, 21 Nov 2024 12:35:23 +0100
Subject: [PATCH 19/42] swarm - changed the init files

---
 .../agent_factories/concrete/__init__.py      | 30 +++++--
 .../swarmauri/agents/concrete/__init__.py     | 21 +----
 .../swarmauri/chains/concrete/__init__.py     | 22 +++--
 .../swarmauri/chunkers/concrete/__init__.py   | 26 +++---
 .../conversations/concrete/__init__.py        | 33 +++++---
 .../swarmauri/distances/concrete/__init__.py  | 53 ++++++------
 .../swarmauri/documents/concrete/__init__.py  | 12 ++-
 .../swarmauri/embeddings/concrete/__init__.py | 44 ++++------
 .../swarmauri/exceptions/concrete/__init__.py | 14 +++-
 .../swarmauri/llms/concrete/__init__.py       | 82 +++++++++----------
 .../measurements/concrete/__init__.py         | 47 +++++++++--
 .../swarmauri/messages/concrete/__init__.py   | 20 ++++-
 .../swarmauri/parsers/concrete/__init__.py    | 58 ++++++-------
 .../swarmauri/prompts/concrete/__init__.py    | 20 ++++-
 .../schema_converters/concrete/__init__.py    | 58 +++++++------
 .../swarmauri/swarms/concrete/__init__.py     | 12 ++-
 .../swarmauri/toolkits/concrete/__init__.py   | 19 +----
 .../swarmauri/tools/concrete/__init__.py      | 23 ++----
 .../swarmauri/tracing/concrete/__init__.py    | 22 +++--
 .../swarmauri/swarmauri/utils/_lazy_import.py | 22 +++++
 .../vector_stores/concrete/__init__.py        | 32 +++---
 .../swarmauri/vectors/concrete/__init__.py    | 16 +++-
 22 files changed, 383 insertions(+), 303 deletions(-)
 create mode 100644 pkgs/swarmauri/swarmauri/utils/_lazy_import.py

diff --git a/pkgs/swarmauri/swarmauri/agent_factories/concrete/__init__.py b/pkgs/swarmauri/swarmauri/agent_factories/concrete/__init__.py
index 651d9d992..8b75d563e 100644
--- a/pkgs/swarmauri/swarmauri/agent_factories/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/agent_factories/concrete/__init__.py
@@ -1,8 +1,22 @@
-from swarmauri.agent_factories.concrete.agent_factory import AgentFactory
-from swarmauri.agent_factories.concrete.conf_driven_agent_factory import (
-    ConfDrivenAgentFactory,
-)
-from JsonAgentFactory import JsonAgentFactory
-from swarmauri.agent_factories.concrete.ReflectionAgentFactory import (
-    ReflectionAgentFactory,
-)
+from swarmauri.utils._lazy_import import _lazy_import
+
+# List of agent factory names (file names without the ".py" extension) and corresponding class names
+agent_factory_files = [
+    ("swarmauri.agent_factories.concrete.agent_factory", "AgentFactory"),
+    (
+        "swarmauri.agent_factories.concrete.conf_driven_agent_factory",
+        "ConfDrivenAgentFactory",
+    ),
+    ("swarmauri.agent_factories.concrete.JsonAgentFactory", "JsonAgentFactory"),
+    (
+        "swarmauri.agent_factories.concrete.ReflectionAgentFactory",
+        "ReflectionAgentFactory",
+    ),
+]
+
+# Lazy loading of agent factories, storing them in variables
+for module_name, class_name in agent_factory_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+# Adding the lazy-loaded agent factories to __all__
+__all__ = [class_name for _, class_name in agent_factory_files]
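One subtlety in the helper introduced above: despite its name, _lazy_import still imports each module eagerly when the loop in an __init__ runs; what it actually provides is graceful degradation to None instead of a hard ImportError. A small behavioral sketch (the missing module name is made up):

    from swarmauri.utils._lazy_import import _lazy_import

    # A resolvable module/class pair returns the class object.
    decoder = _lazy_import("json", "JSONDecoder")
    # An unresolvable one prints a warning and returns None.
    absent = _lazy_import("definitely_not_installed_module", "Anything")
    assert decoder is not None and absent is None
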
diff --git a/pkgs/swarmauri/swarmauri/agents/concrete/__init__.py b/pkgs/swarmauri/swarmauri/agents/concrete/__init__.py
index f474dae0f..0103905cc 100644
--- a/pkgs/swarmauri/swarmauri/agents/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/agents/concrete/__init__.py
@@ -1,27 +1,10 @@
-import importlib
-
-# Define a lazy loader function with a warning message if the module or class is not found
-def _lazy_import(module_name, class_name):
-    try:
-        # Import the module
-        module = importlib.import_module(module_name)
-        # Dynamically get the class from the module
-        return getattr(module, class_name)
-    except ImportError:
-        # If module is not available, print a warning message
-        print(f"Warning: The module '{module_name}' is not available. "
-              f"Please install the necessary dependencies to enable this functionality.")
-        return None
-    except AttributeError:
-        # If class is not found, print a warning message
-        print(f"Warning: The class '{class_name}' was not found in module '{module_name}'.")
-        return None
+from swarmauri.utils._lazy_import import _lazy_import
 
 # List of agent names (file names without the ".py" extension) and corresponding class names
 agent_files = [
-    ("swarmauri.agents.concrete.SimpleConversationAgent", "SimpleConversationAgent"),
     ("swarmauri.agents.concrete.QAAgent", "QAAgent"),
     ("swarmauri.agents.concrete.RagAgent", "RagAgent"),
+    ("swarmauri.agents.concrete.SimpleConversationAgent", "SimpleConversationAgent"),
     ("swarmauri.agents.concrete.ToolAgent", "ToolAgent"),
 ]
diff --git a/pkgs/swarmauri/swarmauri/chains/concrete/__init__.py b/pkgs/swarmauri/swarmauri/chains/concrete/__init__.py
index efdd73eff..d6e508040 100644
--- a/pkgs/swarmauri/swarmauri/chains/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/chains/concrete/__init__.py
@@ -1,11 +1,15 @@
-from swarmauri.chains.concrete.CallableChain import CallableChain
-from swarmauri.chains.concrete.ChainStep import ChainStep
-from swarmauri.chains.concrete.PromptContextChain import PromptContextChain
-from swarmauri.chains.concrete.ContextChain import ContextChain
+from swarmauri.utils._lazy_import import _lazy_import
 
-__all__ = [
-    "CallableChain",
-    "ChainStep",
-    "PromptContextChain",
-    "ContextChain",
+chains_files = [
+    ("swarmauri.chains.concrete.CallableChain", "CallableChain"),
+    ("swarmauri.chains.concrete.ChainStep", "ChainStep"),
+    ("swarmauri.chains.concrete.PromptContextChain", "PromptContextChain"),
+    ("swarmauri.chains.concrete.ContextChain", "ContextChain"),
 ]
+
+# Lazy loading of chain classes, storing them in variables
+for module_name, class_name in chains_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+# Adding the lazy-loaded chain classes to __all__
+__all__ = [class_name for _, class_name in chains_files]
diff --git a/pkgs/swarmauri/swarmauri/chunkers/concrete/__init__.py b/pkgs/swarmauri/swarmauri/chunkers/concrete/__init__.py
index f894163ad..fafead5cf 100644
--- a/pkgs/swarmauri/swarmauri/chunkers/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/chunkers/concrete/__init__.py
@@ -1,13 +1,17 @@
-from swarmauri.chunkers.concrete.DelimiterBasedChunker import DelimiterBasedChunker
-from swarmauri.chunkers.concrete.FixedLengthChunker import FixedLengthChunker
-from 
swarmauri.chunkers.concrete.MdSnippetChunker import MdSnippetChunker -from swarmauri.chunkers.concrete.SentenceChunker import SentenceChunker -from swarmauri.chunkers.concrete.SlidingWindowChunker import SlidingWindowChunker +from swarmauri.utils._lazy_import import _lazy_import -__all__ = [ - "DelimiterBasedChunker", - "FixedLengthChunker", - "MdSnippetChunker", - "SentenceChunker", - "SlidingWindowChunker", +# List of chunker names (file names without the ".py" extension) and corresponding class names +chunkers_files = [ + ("swarmauri.chunkers.concrete.DelimiterBasedChunker", "DelimiterBasedChunker"), + ("swarmauri.chunkers.concrete.FixedLengthChunker", "FixedLengthChunker"), + ("swarmauri.chunkers.concrete.MdSnippetChunker", "MdSnippetChunker"), + ("swarmauri.chunkers.concrete.SentenceChunker", "SentenceChunker"), + ("swarmauri.chunkers.concrete.SlidingWindowChunker", "SlidingWindowChunker"), ] + +# Lazy loading of chunker classes, storing them in variables +for module_name, class_name in chunkers_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded chunker classes to __all__ +__all__ = [class_name for _, class_name in chunkers_files] diff --git a/pkgs/swarmauri/swarmauri/conversations/concrete/__init__.py b/pkgs/swarmauri/swarmauri/conversations/concrete/__init__.py index e51d24fe0..46179d6fb 100644 --- a/pkgs/swarmauri/swarmauri/conversations/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/conversations/concrete/__init__.py @@ -1,15 +1,22 @@ -from swarmauri.conversations.concrete.Conversation import Conversation -from swarmauri.conversations.concrete.MaxSystemContextConversation import ( - MaxSystemContextConversation, -) -from swarmauri.conversations.concrete.MaxSizeConversation import MaxSizeConversation -from swarmauri.conversations.concrete.SessionCacheConversation import ( - SessionCacheConversation, -) +from swarmauri.utils._lazy_import import _lazy_import -__all__ = [ - "Conversation", - "MaxSystemContextConversation", - "MaxSizeConversation", - "SessionCacheConversation", +# List of conversations names (file names without the ".py" extension) and corresponding class names +conversations_files = [ + ("swarmauri.conversations.concrete.Conversation", "Conversation"), + ( + "swarmauri.conversations.concrete.MaxSystemContextConversation", + "MaxSystemContextConversation", + ), + ("swarmauri.conversations.concrete.MaxSizeConversation", "MaxSizeConversation"), + ( + "swarmauri.conversations.concrete.SessionCacheConversation", + "SessionCacheConversation", + ), ] + +# Lazy loading of conversations classes, storing them in variables +for module_name, class_name in conversations_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded conversations classes to __all__ +__all__ = [class_name for _, class_name in conversations_files] diff --git a/pkgs/swarmauri/swarmauri/distances/concrete/__init__.py b/pkgs/swarmauri/swarmauri/distances/concrete/__init__.py index 9e163ca4d..033a0dd13 100644 --- a/pkgs/swarmauri/swarmauri/distances/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/distances/concrete/__init__.py @@ -1,34 +1,27 @@ -import importlib +from swarmauri.utils._lazy_import import _lazy_import -# Define a lazy loader function with a warning message if the module is not found -def _lazy_import(module_name, module_description=None): - try: - return importlib.import_module(module_name) - except ImportError: - # If module is not available, print a warning message - print(f"Warning: The module 
'{module_description or module_name}' is not available. " - f"Please install the necessary dependencies to enable this functionality.") - return None - -# List of distance names (file names without the ".py" extension) -distance_files = [ - "CanberraDistance", - "ChebyshevDistance", - "ChiSquaredDistance", - "CosineDistance", - "EuclideanDistance", - "HaversineDistance", - "JaccardIndexDistance", - "LevenshteinDistance", - "ManhattanDistance", - "MinkowskiDistance", - "SorensenDiceDistance", - "SquaredEuclideanDistance", +# List of distances names (file names without the ".py" extension) and corresponding class names +distances_files = [ + ("swarmauri.distances.concrete.CanberraDistance", "CanberraDistance"), + ("swarmauri.distances.concrete.ChebyshevDistance", "ChebyshevDistance"), + ("swarmauri.distances.concrete.ChiSquaredDistance", "ChiSquaredDistance"), + ("swarmauri.distances.concrete.CosineDistance", "CosineDistance"), + ("swarmauri.distances.concrete.EuclideanDistance", "EuclideanDistance"), + ("swarmauri.distances.concrete.HaversineDistance", "HaversineDistance"), + ("swarmauri.distances.concrete.JaccardIndexDistance", "JaccardIndexDistance"), + ("swarmauri.distances.concrete.LevenshteinDistance", "LevenshteinDistance"), + ("swarmauri.distances.concrete.ManhattanDistance", "ManhattanDistance"), + ("swarmauri.distances.concrete.MinkowskiDistance", "MinkowskiDistance"), + ("swarmauri.distances.concrete.SorensenDiceDistance", "SorensenDiceDistance"), + ( + "swarmauri.distances.concrete.SquaredEuclideanDistance", + "SquaredEuclideanDistance", + ), ] -# Lazy loading of distance modules, storing them in variables -for distance in distance_files: - globals()[distance] = _lazy_import(f"swarmauri.distances.concrete.{distance}", distance) +# Lazy loading of distances classes, storing them in variables +for module_name, class_name in distances_files: + globals()[class_name] = _lazy_import(module_name, class_name) -# Adding the lazy-loaded distance modules to __all__ -__all__ = distance_files +# Adding the lazy-loaded distances classes to __all__ +__all__ = [class_name for _, class_name in distances_files] diff --git a/pkgs/swarmauri/swarmauri/documents/concrete/__init__.py b/pkgs/swarmauri/swarmauri/documents/concrete/__init__.py index f0725fde0..c4b50e1a5 100644 --- a/pkgs/swarmauri/swarmauri/documents/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/documents/concrete/__init__.py @@ -1 +1,11 @@ -from swarmauri.documents.concrete import * +from swarmauri.utils._lazy_import import _lazy_import + +# List of documents names (file names without the ".py" extension) and corresponding class names +documents_files = [("swarmauri.documents.concrete.Document", "Document")] + +# Lazy loading of documents classes, storing them in variables +for module_name, class_name in documents_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded documents classes to __all__ +__all__ = [class_name for _, class_name in documents_files] diff --git a/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py b/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py index a4fd73974..c6d12f871 100644 --- a/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/embeddings/concrete/__init__.py @@ -1,31 +1,19 @@ -import importlib +from swarmauri.utils._lazy_import import _lazy_import -# Define a lazy loader function with a warning message if the module is not found -def _lazy_import(module_name, module_description=None): - try: - return 
importlib.import_module(module_name) - except ImportError: - # If module is not available, print a warning message - print(f"Warning: The module '{module_description or module_name}' is not available. " - f"Please install the necessary dependencies to enable this functionality.") - return None +# List of embeddings names (file names without the ".py" extension) and corresponding class names +embeddings_files = [ + ("swarmauri.embeddings.concrete.CohereEmbedding", "CohereEmbedding"), + ("swarmauri.embeddings.concrete.GeminiEmbedding", "GeminiEmbedding"), + ("swarmauri.embeddings.concrete.MistralEmbedding", "MistralEmbedding"), + ("swarmauri.embeddings.concrete.NmfEmbedding", "NmfEmbedding"), + ("swarmauri.embeddings.concrete.OpenAIEmbedding", "OpenAIEmbedding"), + ("swarmauri.embeddings.concrete.TfidfEmbedding", "TfidfEmbedding"), + ("swarmauri.embeddings.concrete.VoyageEmbedding", "VoyageEmbedding"), +] -# Lazy loading of embeddings with descriptive names -# Doc2VecEmbedding = _lazy_import("swarmauri.embeddings.concrete.Doc2VecEmbedding", "Doc2VecEmbedding") -GeminiEmbedding = _lazy_import("swarmauri.embeddings.concrete.GeminiEmbedding", "GeminiEmbedding") -MistralEmbedding = _lazy_import("swarmauri.embeddings.concrete.MistralEmbedding", "MistralEmbedding") -# MlmEmbedding = _lazy_import("swarmauri.embeddings.concrete.MlmEmbedding", "MlmEmbedding") -NmfEmbedding = _lazy_import("swarmauri.embeddings.concrete.NmfEmbedding", "NmfEmbedding") -OpenAIEmbedding = _lazy_import("swarmauri.embeddings.concrete.OpenAIEmbedding", "OpenAIEmbedding") -TfidfEmbedding = _lazy_import("swarmauri.embeddings.concrete.TfidfEmbedding", "TfidfEmbedding") +# Lazy loading of embeddings classes, storing them in variables +for module_name, class_name in embeddings_files: + globals()[class_name] = _lazy_import(module_name, class_name) -# Adding lazy-loaded modules to __all__ -__all__ = [ - # "Doc2VecEmbedding", - "GeminiEmbedding", - "MistralEmbedding", - # "MlmEmbedding", - "NmfEmbedding", - "OpenAIEmbedding", - "TfidfEmbedding", -] +# Adding the lazy-loaded embeddings classes to __all__ +__all__ = [class_name for _, class_name in embeddings_files] diff --git a/pkgs/swarmauri/swarmauri/exceptions/concrete/__init__.py b/pkgs/swarmauri/swarmauri/exceptions/concrete/__init__.py index 43b631bc1..2baf7a56d 100644 --- a/pkgs/swarmauri/swarmauri/exceptions/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/exceptions/concrete/__init__.py @@ -1,3 +1,13 @@ -from swarmauri.exceptions.concrete.IndexErrorWithContext import IndexErrorWithContext +from swarmauri.utils._lazy_import import _lazy_import -__all__ = ["IndexErrorWithContext"] +# List of exceptions names (file names without the ".py" extension) and corresponding class names +exceptions_files = [ + ("swarmauri.exceptions.concrete.IndexErrorWithContext", "IndexErrorWithContext"), +] + +# Lazy loading of exceptions classes, storing them in variables +for module_name, class_name in exceptions_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded exceptions classes to __all__ +__all__ = [class_name for _, class_name in exceptions_files] diff --git a/pkgs/swarmauri/swarmauri/llms/concrete/__init__.py b/pkgs/swarmauri/swarmauri/llms/concrete/__init__.py index a24e7b59f..975ac7e93 100644 --- a/pkgs/swarmauri/swarmauri/llms/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/llms/concrete/__init__.py @@ -1,47 +1,43 @@ -import importlib +from swarmauri.utils._lazy_import import _lazy_import -# Define a lazy loader function with a 
warning message if the module is not found
-def _lazy_import(module_name, module_description=None):
-    try:
-        return importlib.import_module(module_name)
-    except ImportError:
-        # If module is not available, print a warning message
-        print(f"Warning: The module '{module_description or module_name}' is not available. "
-              f"Please install the necessary dependencies to enable this functionality.")
-        return None
-
-# List of model names (file names without the ".py" extension)
-model_files = [
-    "AI21StudioModel",
-    "AnthropicModel",
-    "AnthropicToolModel",
-    "BlackForestimgGenModel",
-    "CohereModel",
-    "CohereToolModel",
-    "DeepInfraImgGenModel",
-    "DeepInfraModel",
-    "DeepSeekModel",
-    "FalAllImgGenModel",
-    "FalAVisionModel",
-    "GeminiProModel",
-    "GeminiToolModel",
-    "GroqAudio",
-    "GroqModel",
-    "GroqToolModel",
-    "GroqVisionModel",
-    "MistralModel",
-    "MistralToolModel",
-    "OpenAIGenModel",
-    "OpenAIModel",
-    "OpenAIToolModel",
-    "PerplexityModel",
-    "PlayHTModel",
-    "WhisperLargeModel",
+# List of llms names (file names without the ".py" extension) and corresponding class names
+llms_files = [
+    ("swarmauri.llms.concrete.AI21StudioModel", "AI21StudioModel"),
+    ("swarmauri.llms.concrete.AnthropicModel", "AnthropicModel"),
+    ("swarmauri.llms.concrete.AnthropicToolModel", "AnthropicToolModel"),
+    ("swarmauri.llms.concrete.BlackForestImgGenModel", "BlackForestImgGenModel"),
+    ("swarmauri.llms.concrete.CohereModel", "CohereModel"),
+    ("swarmauri.llms.concrete.CohereToolModel", "CohereToolModel"),
+    ("swarmauri.llms.concrete.DeepInfraImgGenModel", "DeepInfraImgGenModel"),
+    ("swarmauri.llms.concrete.DeepInfraModel", "DeepInfraModel"),
+    ("swarmauri.llms.concrete.DeepSeekModel", "DeepSeekModel"),
+    ("swarmauri.llms.concrete.FalAIImgGenModel", "FalAIImgGenModel"),
+    ("swarmauri.llms.concrete.FalAIVisionModel", "FalAIVisionModel"),
+    ("swarmauri.llms.concrete.GeminiProModel", "GeminiProModel"),
+    ("swarmauri.llms.concrete.GeminiToolModel", "GeminiToolModel"),
+    ("swarmauri.llms.concrete.GroqAIAudio", "GroqAIAudio"),
+    ("swarmauri.llms.concrete.GroqModel", "GroqModel"),
+    ("swarmauri.llms.concrete.GroqToolModel", "GroqToolModel"),
+    ("swarmauri.llms.concrete.GroqVisionModel", "GroqVisionModel"),
+    ("swarmauri.llms.concrete.HyperbolicAudioTTS", "HyperbolicAudioTTS"),
+    ("swarmauri.llms.concrete.HyperbolicImgGenModel", "HyperbolicImgGenModel"),
+    ("swarmauri.llms.concrete.HyperbolicModel", "HyperbolicModel"),
+    ("swarmauri.llms.concrete.HyperbolicVisionModel", "HyperbolicVisionModel"),
+    ("swarmauri.llms.concrete.MistralModel", "MistralModel"),
+    ("swarmauri.llms.concrete.MistralToolModel", "MistralToolModel"),
+    ("swarmauri.llms.concrete.OpenAIAudio", "OpenAIAudio"),
+    ("swarmauri.llms.concrete.OpenAIAudioTTS", "OpenAIAudioTTS"),
+    ("swarmauri.llms.concrete.OpenAIImgGenModel", "OpenAIImgGenModel"),
+    ("swarmauri.llms.concrete.OpenAIModel", "OpenAIModel"),
+    ("swarmauri.llms.concrete.OpenAIToolModel", "OpenAIToolModel"),
+    ("swarmauri.llms.concrete.PerplexityModel", "PerplexityModel"),
+    ("swarmauri.llms.concrete.PlayHTModel", "PlayHTModel"),
+    ("swarmauri.llms.concrete.WhisperLargeModel", "WhisperLargeModel"),
 ]
 
-# Lazy loading of models, storing them in variables
-for model in model_files:
-    globals()[model] = _lazy_import(f"swarmauri.llms.concrete.{model}", model)
+# Lazy loading of llms classes, storing them in variables
+for module_name, class_name in llms_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
 
-# Adding the lazy-loaded models to __all__
-__all__ = model_files
+# Adding the lazy-loaded llms classes to __all__
+__all__ = [class_name for _, class_name in llms_files]
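Because the loop above still imports all thirty-odd model modules as soon as swarmauri.llms.concrete is imported, a genuinely deferred variant would hand resolution to a module-level __getattr__ (PEP 562). A sketch of that alternative design, not what this patch implements:

    # Hypothetical __init__.py variant: resolve a class only on first access,
    # reusing the llms_files list of (module_name, class_name) pairs above.
    import importlib

    _registry = {class_name: module_name for module_name, class_name in llms_files}

    def __getattr__(name):
        if name in _registry:
            return getattr(importlib.import_module(_registry[name]), name)
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
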
diff --git a/pkgs/swarmauri/swarmauri/measurements/concrete/__init__.py b/pkgs/swarmauri/swarmauri/measurements/concrete/__init__.py
index e340b2b85..ea47cb17f 100644
--- a/pkgs/swarmauri/swarmauri/measurements/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/measurements/concrete/__init__.py
@@ -1,6 +1,41 @@
-from swarmauri.measurements.concrete.FirstImpressionMeasurement import FirstImpressionMeasurement
-from swarmauri.measurements.concrete.MeanMeasurement import MeanMeasurement
-from swarmauri.measurements.concrete.PatternMatchingMeasurement import PatternMatchingMeasurement
-from swarmauri.measurements.concrete.RatioOfSumsMeasurement import RatioOfSumsMeasurement
-from swarmauri.measurements.concrete.StaticMeasurement import StaticMeasurement
-from swarmauri.measurements.concrete.ZeroMeasurement import ZeroMeasurement
+from swarmauri.utils._lazy_import import _lazy_import
+
+# List of measurements names (file names without the ".py" extension) and corresponding class names
+measurements_files = [
+    (
+        "swarmauri.measurements.concrete.CompletenessMeasurement",
+        "CompletenessMeasurement",
+    ),
+    (
+        "swarmauri.measurements.concrete.DistinctivenessMeasurement",
+        "DistinctivenessMeasurement",
+    ),
+    (
+        "swarmauri.measurements.concrete.FirstImpressionMeasurement",
+        "FirstImpressionMeasurement",
+    ),
+    ("swarmauri.measurements.concrete.MeanMeasurement", "MeanMeasurement"),
+    ("swarmauri.measurements.concrete.MiscMeasurement", "MiscMeasurement"),
+    (
+        "swarmauri.measurements.concrete.MissingnessMeasurement",
+        "MissingnessMeasurement",
+    ),
+    (
+        "swarmauri.measurements.concrete.PatternMatchingMeasurement",
+        "PatternMatchingMeasurement",
+    ),
+    (
+        "swarmauri.measurements.concrete.RatioOfSumsMeasurement",
+        "RatioOfSumsMeasurement",
+    ),
+    ("swarmauri.measurements.concrete.StaticMeasurement", "StaticMeasurement"),
+    ("swarmauri.measurements.concrete.UniquenessMeasurement", "UniquenessMeasurement"),
+    ("swarmauri.measurements.concrete.ZeroMeasurement", "ZeroMeasurement"),
+]
+
+# Lazy loading of measurements classes, storing them in variables
+for module_name, class_name in measurements_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+# Adding the lazy-loaded measurements classes to __all__
+__all__ = [class_name for _, class_name in measurements_files]
diff --git a/pkgs/swarmauri/swarmauri/messages/concrete/__init__.py b/pkgs/swarmauri/swarmauri/messages/concrete/__init__.py
index 5c619ecc8..716bd57c5 100644
--- a/pkgs/swarmauri/swarmauri/messages/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/messages/concrete/__init__.py
@@ -1,4 +1,16 @@
-from swarmauri.messages.concrete.HumanMessage import HumanMessage
-from swarmauri.messages.concrete.AgentMessage import AgentMessage
-from swarmauri.messages.concrete.FunctionMessage import FunctionMessage
-from swarmauri.messages.concrete.SystemMessage import SystemMessage
+from swarmauri.utils._lazy_import import _lazy_import
+
+# List of messages names (file names without the ".py" extension) and corresponding class names
+messages_files = [
+    ("swarmauri.messages.concrete.HumanMessage", "HumanMessage"),
+    ("swarmauri.messages.concrete.AgentMessage", "AgentMessage"),
+    ("swarmauri.messages.concrete.FunctionMessage", "FunctionMessage"),
+    ("swarmauri.messages.concrete.SystemMessage", "SystemMessage"),
+]
+
+# Lazy loading of messages classes, storing them in variables
+for module_name, class_name in messages_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+# Adding the lazy-loaded messages classes to __all__
+__all__ = [class_name for _, class_name in messages_files]
messages_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded messages classes to __all__ +__all__ = [class_name for _, class_name in messages_files] diff --git a/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py b/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py index a42b1b55f..fb730763f 100644 --- a/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/parsers/concrete/__init__.py @@ -1,37 +1,29 @@ -import importlib +from swarmauri.utils._lazy_import import _lazy_import -# Define a lazy loader function with a warning message if the module is not found -def _lazy_import(module_name, module_description=None): - try: - return importlib.import_module(module_name) - except ImportError: - # If module is not available, print a warning message - print(f"Warning: The module '{module_description or module_name}' is not available. " - f"Please install the necessary dependencies to enable this functionality.") - return None - -# List of parser names (file names without the ".py" extension) -parser_files = [ - "BeautifulSoupElementParser", - # "BERTEmbeddingParser", - "CSVParser", - "EntityRecognitionParser", - "HTMLTagStripParser", - "KeywordExtractorParser", - "Md2HtmlParser", - "OpenAPISpecParser", - "PhoneNumberExtractorParser", - "PythonParser", - "RegExParser", - # "TextBlobNounParser", - # "TextBlobSentenceParser", - "URLExtractorParser", - "XMLParser", +# List of parsers names (file names without the ".py" extension) and corresponding class names +parsers_files = [ + ( + "swarmauri.parsers.concrete.BeautifulSoupElementParser", + "BeautifulSoupElementParser", + ), + ("swarmauri.parsers.concrete.CSVParser", "CSVParser"), + ("swarmauri.parsers.concrete.HTMLTagStripParser", "HTMLTagStripParser"), + ("swarmauri.parsers.concrete.KeywordExtractorParser", "KeywordExtractorParser"), + ("swarmauri.parsers.concrete.Md2HtmlParser", "Md2HtmlParser"), + ("swarmauri.parsers.concrete.OpenAPISpecParser", "OpenAPISpecParser"), + ( + "swarmauri.parsers.concrete.PhoneNumberExtractorParser", + "PhoneNumberExtractorParser", + ), + ("swarmauri.parsers.concrete.PythonParser", "PythonParser"), + ("swarmauri.parsers.concrete.RegExParser", "RegExParser"), + ("swarmauri.parsers.concrete.URLExtractorParser", "URLExtractorParser"), + ("swarmauri.parsers.concrete.XMLParser", "XMLParser"), ] -# Lazy loading of parser modules, storing them in variables -for parser in parser_files: - globals()[parser] = _lazy_import(f"swarmauri.parsers.concrete.{parser}", parser) +# Lazy loading of parsers classes, storing them in variables +for module_name, class_name in parsers_files: + globals()[class_name] = _lazy_import(module_name, class_name) -# Adding the lazy-loaded parser modules to __all__ -__all__ = parser_files +# Adding the lazy-loaded parsers classes to __all__ +__all__ = [class_name for _, class_name in parsers_files] diff --git a/pkgs/swarmauri/swarmauri/prompts/concrete/__init__.py b/pkgs/swarmauri/swarmauri/prompts/concrete/__init__.py index 00d6b3cb9..3755b609f 100644 --- a/pkgs/swarmauri/swarmauri/prompts/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/prompts/concrete/__init__.py @@ -1,4 +1,16 @@ -from swarmauri.prompts.concrete.Prompt import Prompt -from swarmauri.prompts.concrete.PromptGenerator import PromptGenerator -from swarmauri.prompts.concrete.PromptMatrix import PromptMatrix -from swarmauri.prompts.concrete.PromptTemplate import PromptTemplate +from swarmauri.utils._lazy_import import _lazy_import + +# List of prompts 
names (file names without the ".py" extension) and corresponding class names
+prompts_files = [
+    ("swarmauri.prompts.concrete.Prompt", "Prompt"),
+    ("swarmauri.prompts.concrete.PromptGenerator", "PromptGenerator"),
+    ("swarmauri.prompts.concrete.PromptMatrix", "PromptMatrix"),
+    ("swarmauri.prompts.concrete.PromptTemplate", "PromptTemplate"),
+]
+
+# Lazy loading of prompts classes, storing them in variables
+for module_name, class_name in prompts_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+# Adding the lazy-loaded prompts classes to __all__
+__all__ = [class_name for _, class_name in prompts_files]

diff --git a/pkgs/swarmauri/swarmauri/schema_converters/concrete/__init__.py b/pkgs/swarmauri/swarmauri/schema_converters/concrete/__init__.py
index c608d8c11..65044d64d 100644
--- a/pkgs/swarmauri/swarmauri/schema_converters/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/schema_converters/concrete/__init__.py
@@ -1,29 +1,37 @@
-import importlib
+from swarmauri.utils._lazy_import import _lazy_import

-# Define a lazy loader function with a warning message if the module is not found
-def _lazy_import(module_name, module_description=None):
-    try:
-        return importlib.import_module(module_name)
-    except ImportError:
-        # If module is not available, print a warning message
-        print(f"Warning: The module '{module_description or module_name}' is not available. "
-              f"Please install the necessary dependencies to enable this functionality.")
-        return None
-
-# List of schema converter names (file names without the ".py" extension)
-schema_converter_files = [
-    "AnthropicSchemaConverter",
-    "CohereSchemaConverter",
-    "GeminiSchemaConverter",
-    "GroqSchemaConverter",
-    "MistralSchemaConverter",
-    "OpenAISchemaConverter",
-    "ShuttleAISchemaConverter",
+# List of schema_converters names (file names without the ".py" extension) and corresponding class names
+schema_converters_files = [
+    (
+        "swarmauri.schema_converters.concrete.AnthropicSchemaConverter",
+        "AnthropicSchemaConverter",
+    ),
+    (
+        "swarmauri.schema_converters.concrete.CohereSchemaConverter",
+        "CohereSchemaConverter",
+    ),
+    (
+        "swarmauri.schema_converters.concrete.GeminiSchemaConverter",
+        "GeminiSchemaConverter",
+    ),
+    ("swarmauri.schema_converters.concrete.GroqSchemaConverter", "GroqSchemaConverter"),
+    (
+        "swarmauri.schema_converters.concrete.MistralSchemaConverter",
+        "MistralSchemaConverter",
+    ),
+    (
+        "swarmauri.schema_converters.concrete.OpenAISchemaConverter",
+        "OpenAISchemaConverter",
+    ),
+    (
+        "swarmauri.schema_converters.concrete.ShuttleAISchemaConverter",
+        "ShuttleAISchemaConverter",
+    ),
 ]

-# Lazy loading of schema converters, storing them in variables
-for schema_converter in schema_converter_files:
-    globals()[schema_converter] = _lazy_import(f"swarmauri.schema_converters.concrete.{schema_converter}", schema_converter)
+# Lazy loading of schema_converters classes, storing them in variables
+for module_name, class_name in schema_converters_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)

-# Adding the lazy-loaded schema converters to __all__
-__all__ = schema_converter_files
+# Adding the lazy-loaded schema_converters classes to __all__
+__all__ = [class_name for _, class_name in schema_converters_files]

diff --git a/pkgs/swarmauri/swarmauri/swarms/concrete/__init__.py b/pkgs/swarmauri/swarmauri/swarms/concrete/__init__.py
index bd32d1999..61f84eae6 100644
--- a/pkgs/swarmauri/swarmauri/swarms/concrete/__init__.py
+++
b/pkgs/swarmauri/swarmauri/swarms/concrete/__init__.py @@ -1 +1,11 @@ -from swarmauri.swarms.concrete.SimpleSwarmFactory import SimpleSwarmFactory +from swarmauri.utils._lazy_import import _lazy_import + +# List of swarms names (file names without the ".py" extension) and corresponding class names +swarms_files = [("swarmauri.swarms.concrete.SimpleSwarmFactory", "SimpleSwarmFactory")] + +# Lazy loading of swarms classes, storing them in variables +for module_name, class_name in swarms_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded swarms classes to __all__ +__all__ = [class_name for _, class_name in swarms_files] diff --git a/pkgs/swarmauri/swarmauri/toolkits/concrete/__init__.py b/pkgs/swarmauri/swarmauri/toolkits/concrete/__init__.py index 87127d6bf..a7311c7c9 100644 --- a/pkgs/swarmauri/swarmauri/toolkits/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/toolkits/concrete/__init__.py @@ -1,21 +1,4 @@ -import importlib - -# Define a lazy loader function with a warning message if the module or class is not found -def _lazy_import(module_name, class_name): - try: - # Import the module - module = importlib.import_module(module_name) - # Dynamically get the class from the module - return getattr(module, class_name) - except ImportError: - # If module is not available, print a warning message - print(f"Warning: The module '{module_name}' is not available. " - f"Please install the necessary dependencies to enable this functionality.") - return None - except AttributeError: - # If class is not found, print a warning message - print(f"Warning: The class '{class_name}' was not found in module '{module_name}'.") - return None +from swarmauri.utils._lazy_import import _lazy_import # List of toolkit names (file names without the ".py" extension) and corresponding class names toolkit_files = [ diff --git a/pkgs/swarmauri/swarmauri/tools/concrete/__init__.py b/pkgs/swarmauri/swarmauri/tools/concrete/__init__.py index f9d2a297e..5b7d61054 100644 --- a/pkgs/swarmauri/swarmauri/tools/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/tools/concrete/__init__.py @@ -1,25 +1,12 @@ -import importlib - -# Define a lazy loader function with a warning message if the module or class is not found -def _lazy_import(module_name, class_name): - try: - # Import the module - module = importlib.import_module(module_name) - # Dynamically get the class from the module - return getattr(module, class_name) - except ImportError: - # If module is not available, print a warning message - print(f"Warning: The module '{module_name}' is not available. 
" - f"Please install the necessary dependencies to enable this functionality.") - return None - except AttributeError: - print(f"Warning: The class '{class_name}' was not found in module '{module_name}'.") - return None +from swarmauri.utils._lazy_import import _lazy_import # List of tool names (file names without the ".py" extension) and corresponding class names tool_files = [ ("swarmauri.tools.concrete.AdditionTool", "AdditionTool"), - ("swarmauri.tools.concrete.AutomatedReadabilityIndexTool", "AutomatedReadabilityIndexTool"), + ( + "swarmauri.tools.concrete.AutomatedReadabilityIndexTool", + "AutomatedReadabilityIndexTool", + ), ("swarmauri.tools.concrete.CalculatorTool", "CalculatorTool"), ("swarmauri.tools.concrete.CodeExtractorTool", "CodeExtractorTool"), ("swarmauri.tools.concrete.CodeInterpreterTool", "CodeInterpreterTool"), diff --git a/pkgs/swarmauri/swarmauri/tracing/concrete/__init__.py b/pkgs/swarmauri/swarmauri/tracing/concrete/__init__.py index 95900d024..1b6619352 100644 --- a/pkgs/swarmauri/swarmauri/tracing/concrete/__init__.py +++ b/pkgs/swarmauri/swarmauri/tracing/concrete/__init__.py @@ -1,5 +1,17 @@ -from swarmauri.tracing.concrete.CallableTracer import CallableTracer -from swarmauri.tracing.concrete.ChainTracer import ChainTracer -from swarmauri.tracing.concrete.SimpleTraceContext import SimpleTraceContext -from swarmauri.tracing.concrete.TracedVariable import TracedVariable -from swarmauri.tracing.concrete.VariableTracer import VariableTracer +from swarmauri.utils._lazy_import import _lazy_import + +# List of tracing names (file names without the ".py" extension) and corresponding class names +tracing_files = [ + ("swarmauri.tracing.concrete.CallableTracer", "CallableTracer"), + ("from swarmauri.tracing.concrete.ChainTracer", "ChainTracer"), + ("swarmauri.tracing.concrete.SimpleTraceContext", "SimpleTraceContext"), + ("swarmauri.tracing.concrete.TracedVariable", "TracedVariable"), + ("swarmauri.tracing.concrete.VariableTracer", "VariableTracer"), +] + +# Lazy loading of tracings, storing them in variables +for module_name, class_name in tracing_files: + globals()[class_name] = _lazy_import(module_name, class_name) + +# Adding the lazy-loaded tracings to __all__ +__all__ = [class_name for _, class_name in tracing_files] diff --git a/pkgs/swarmauri/swarmauri/utils/_lazy_import.py b/pkgs/swarmauri/swarmauri/utils/_lazy_import.py new file mode 100644 index 000000000..a3d3bd34a --- /dev/null +++ b/pkgs/swarmauri/swarmauri/utils/_lazy_import.py @@ -0,0 +1,22 @@ +import importlib + + +# Define a lazy loader function with a warning message if the module or class is not found +def _lazy_import(module_name, class_name): + try: + # Import the module + module = importlib.import_module(module_name) + # Dynamically get the class from the module + return getattr(module, class_name) + except ImportError: + # If module is not available, print a warning message + print( + f"Warning: The module '{module_name}' is not available. " + f"Please install the necessary dependencies to enable this functionality." + ) + return None + except AttributeError: + print( + f"Warning: The class '{class_name}' was not found in module '{module_name}'." 
diff --git a/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py b/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py
index 2f946b377..ceb2b245c 100644
--- a/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/vector_stores/concrete/__init__.py
@@ -1,26 +1,14 @@
-import importlib
+from swarmauri.utils._lazy_import import _lazy_import

-# Define a lazy loader function with a warning message if the module is not found
-def _lazy_import(module_name, module_description=None):
-    try:
-        return importlib.import_module(module_name)
-    except ImportError:
-        # If module is not available, print a warning message
-        print(f"Warning: The module '{module_description or module_name}' is not available. "
-              f"Please install the necessary dependencies to enable this functionality.")
-        return None
-
-# List of vector store names (file names without the ".py" extension)
-vector_store_files = [
-    # "Doc2VecVectorStore",
-    # "MlmVectorStore",
-    "SqliteVectorStore",
-    "TfidfVectorStore",
+# List of vector_stores names (file names without the ".py" extension) and corresponding class names
+vector_stores_files = [
+    ("swarmauri.vector_stores.concrete.SqliteVectorStore", "SqliteVectorStore"),
+    ("swarmauri.vector_stores.concrete.TfidfVectorStore", "TfidfVectorStore"),
 ]

-# Lazy loading of vector stores, storing them in variables
-for vector_store in vector_store_files:
-    globals()[vector_store] = _lazy_import(f"swarmauri.vector_stores.concrete.{vector_store}", vector_store)
+# Lazy loading of vector store classes, storing them in variables
+for module_name, class_name in vector_stores_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)

-# Adding the lazy-loaded vector stores to __all__
-__all__ = vector_store_files
+# Adding the lazy-loaded vector store classes to __all__
+__all__ = [class_name for _, class_name in vector_stores_files]

diff --git a/pkgs/swarmauri/swarmauri/vectors/concrete/__init__.py b/pkgs/swarmauri/swarmauri/vectors/concrete/__init__.py
index 16f348f20..7283bc0a9 100644
--- a/pkgs/swarmauri/swarmauri/vectors/concrete/__init__.py
+++ b/pkgs/swarmauri/swarmauri/vectors/concrete/__init__.py
@@ -1,4 +1,14 @@
-# -*- coding: utf-8 -*-
+from swarmauri.utils._lazy_import import _lazy_import

-from swarmauri.vectors.concrete.Vector import Vector
-from swarmauri.vectors.concrete.VectorProductMixin import VectorProductMixin
+# List of vectors names (file names without the ".py" extension) and corresponding class names
+vectors_files = [
+    ("swarmauri.vectors.concrete.Vector", "Vector"),
+    ("swarmauri.vectors.concrete.VectorProductMixin", "VectorProductMixin"),
+]
+
+# Lazy loading of vector classes, storing them in variables
+for module_name, class_name in vectors_files:
+    globals()[class_name] = _lazy_import(module_name, class_name)
+
+# Adding the lazy-loaded vector classes to __all__
+__all__ = [class_name for _, class_name in vectors_files]

From 4823f078738e231651bdc1ce17df7c38d8a4624b Mon Sep 17 00:00:00 2001
From: cobycloud <25079070+cobycloud@users.noreply.github.com>
Date: Thu, 21 Nov 2024 05:38:25 -0600
Subject: [PATCH 20/42] comm - Update pyproject.toml

---
 pkgs/community/pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml
index 332deb6bd..a2eb33d1c 100644
--- a/pkgs/community/pyproject.toml
+++ b/pkgs/community/pyproject.toml
@@ -15,7 +15,7 @@ classifiers = [

 [tool.poetry.dependencies]
 python = ">=3.10,<3.13"
-swarmauri = "==0.5.2"
+swarmauri = "==0.5.3.dev1" typing_extensions = "*" matplotlib = { version = ">=3.9.2", optional = true } nltk = { version = "^3.9.1", optional = true } @@ -88,4 +88,4 @@ log_cli = true log_cli_level = "INFO" log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" log_cli_date_format = "%Y-%m-%d %H:%M:%S" -asyncio_default_fixture_loop_scope = "function" \ No newline at end of file +asyncio_default_fixture_loop_scope = "function" From d7754930db05f18807d524a2b86a98bc87a88942 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:42:21 -0600 Subject: [PATCH 21/42] swarm - Update pyproject.toml --- pkgs/swarmauri/pyproject.toml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index 58608f84c..27656153a 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri" -version = "0.5.3.dev1" +version = "0.5.3.dev2" description = "This repository includes base classes, concrete generics, and concrete standard components within the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -52,7 +52,11 @@ matplotlib = { version = ">=3.9.2", optional = true } # Extras without versioning, grouped for specific use cases io = ["aiofiles", "aiohttp"] #llms = ["cohere", "mistralai", "fal-client", "google-generativeai", "openai"] -nlp = ["nltk", "textblob", "yake"] +nlp = [ + #"nltk", + #"textblob", + "yake" +] nlp_tools = ["beautifulsoup4"] #ml_toolkits = ["gensim", "scipy", "scikit-learn"] #spacy = ["spacy"] From b2da5246038e50b8916779695c684e099b593daa Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:43:14 -0600 Subject: [PATCH 22/42] cicd - Update test_changed_files.yaml --- .github/workflows/test_changed_files.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_changed_files.yaml b/.github/workflows/test_changed_files.yaml index 2f28a48fd..fbe485428 100644 --- a/.github/workflows/test_changed_files.yaml +++ b/.github/workflows/test_changed_files.yaml @@ -126,7 +126,7 @@ jobs: - name: Install package dependencies run: | cd pkgs/${{ matrix.package_tests.package }} - poetry install --no-cache -vv + poetry install --no-cache --all-extras -vv - name: Run all tests for the package run: | From 80821d3a909606f9f2324c09031eb4a407874e3e Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:48:47 -0600 Subject: [PATCH 23/42] Create increment_version_dev.yaml --- .github/workflows/increment_version_dev.yaml | 85 ++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 .github/workflows/increment_version_dev.yaml diff --git a/.github/workflows/increment_version_dev.yaml b/.github/workflows/increment_version_dev.yaml new file mode 100644 index 000000000..551b83093 --- /dev/null +++ b/.github/workflows/increment_version_dev.yaml @@ -0,0 +1,85 @@ +name: Increment Versions + +on: + workflow_dispatch: + +jobs: + increment-versions: + runs-on: ubuntu-latest + + steps: + - name: Check out code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install TOML editor + run: | + python -m pip install --upgrade pip + pip install tomlkit + + - name: Increment versions in pyproject.toml + run: | + echo "Incrementing versions..." + find . 
-name "pyproject.toml" | while read -r pyproject; do + echo "Processing $pyproject" + + # Extract current version + CURRENT_VERSION=$(python -c " +import tomlkit +with open('$pyproject', 'r') as f: + data = tomlkit.parse(f.read()) + print(data['tool']['poetry']['version']) +") + + # Increment version + BASE_VERSION=$(echo "$CURRENT_VERSION" | sed -E 's/(.*)-dev.*/\1/') + DEV_PART=$(echo "$CURRENT_VERSION" | grep -oE 'dev[0-9]+$' | grep -oE '[0-9]+') + NEW_DEV_PART=$((DEV_PART + 1)) + NEW_VERSION="${BASE_VERSION}-dev${NEW_DEV_PART:-1}" + + echo "Updating version from $CURRENT_VERSION to $NEW_VERSION" + + # Update version in pyproject.toml + python -c " +import tomlkit +with open('$pyproject', 'r') as f: + data = tomlkit.parse(f.read()) +data['tool']['poetry']['version'] = '$NEW_VERSION' +with open('$pyproject', 'w') as f: + f.write(tomlkit.dumps(data)) +" + + # Update dependencies starting with 'swarmauri' + python -c " +import tomlkit +with open('$pyproject', 'r') as f: + data = tomlkit.parse(f.read()) +dependencies = data['tool']['poetry'].get('dependencies', {}) +for dep, version in dependencies.items(): + if dep.startswith('swarmauri') and isinstance(version, str): + base_version = version.split('-dev')[0] + dev_part = int(version.split('-dev')[-1]) if '-dev' in version else 0 + new_version = f'{base_version}-dev{dev_part + 1}' + dependencies[dep] = new_version +data['tool']['poetry']['dependencies'] = dependencies +with open('$pyproject', 'w') as f: + f.write(tomlkit.dumps(data)) +" + done + + - name: Commit changes + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add . + git commit -m "Incremented versions in pyproject.toml files" + + - name: Push changes + uses: ad-m/github-push-action@v0.6.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: ${{ github.ref_name }} From 32af6d3d05221ed240f76afd09aec2ac9205b3c3 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:52:02 -0600 Subject: [PATCH 24/42] comm - Update pyproject.toml --- pkgs/community/pyproject.toml | 46 +++++++++++++++++------------------ 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index a2eb33d1c..5f4871f59 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-community" -version = "0.5.2.dev20" +version = "0.5.3.dev2" description = "This repository includes Swarmauri community components." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -27,9 +27,25 @@ torch = { version = "^2.5.0", optional = true } leptonai = { version = "==0.22.0", optional = true } redis = { version = "^4.0", optional = true } pinecone-client = { version = ">=2.0.0", optional = true, extras = ["grpc"] } -#protobuf = { version = "^3.20.0", optional = true } -#numba = { version = ">=0.59.0", optional = true } -#pacmap = { version = "==0.7.3", optional = true } +textstat = { version = "^0.7.0", optional = true } +annoy = { version = "^1.17.1", optional = true } +folium = { version = "^0.14.0", optional = true } +duckdb = { version = "^0.8.1", optional = true } +neo4j = { version = "^5.12.0", optional = true } +chromadb = { version = "^0.4.4", optional = true } +qdrant_client = { version = "^1.2.0", optional = true } +weaviate-client = { version = "^3.12.0", optional = true } +PyPDF2 = { version = "^3.0.0", optional = true } +pymupdf = { version = "^1.21.1", optional = true } +pypdftk = { version = "^0.4.1", optional = true } +psutil = { version = "^5.9.5", optional = true } +qrcode = { version = "^7.4.0", optional = true } +pygithub = { version = "^1.58.0", optional = true } +gradio = { version = "^3.40.0", optional = true } +google-generativeai = { version = "^0.5.0", optional = true } +openai = { version = "^0.27.0", optional = true } +scipy = { version = "^1.11.0", optional = true } +tiktoken = { version = "^0.3.0", optional = true } [tool.poetry.extras] # Grouped optional dependencies @@ -45,8 +61,6 @@ torch = ["torch"] gradio = ["gradio"] model_clients = ["leptonai", "google-generativeai", "openai"] tiktoken = ["tiktoken"] -#protobuf = ["protobuf"] -#pacmap = ["numba", "pacmap"] # Full installation full = [ @@ -59,8 +73,8 @@ full = [ "scipy", "spacy", "torch", "gradio", - "leptonai", "google-generativeai", "openai" - #"pacmap", "numba" + "leptonai", "google-generativeai", "openai", + "tiktoken" ] [tool.poetry.dev-dependencies] @@ -74,18 +88,4 @@ python-dotenv = "^1.0.0" requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" -[tool.pytest.ini_options] -norecursedirs = ["combined", "scripts"] - -markers = [ - "test: standard test", - "unit: Unit tests", - "integration: Integration tests", - "acceptance: Acceptance tests", - "experimental: Experimental tests" -] -log_cli = true -log_cli_level = "INFO" -log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" -log_cli_date_format = "%Y-%m-%d %H:%M:%S" -asyncio_default_fixture_loop_scope = "function" +[tool.pytest.ini From 31558d1f05e2af64be4526b58ed769335df7c2a9 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:52:47 -0600 Subject: [PATCH 25/42] comm - Update pyproject.toml --- pkgs/community/pyproject.toml | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index 5f4871f59..f0ff82388 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-community" -version = "0.5.3.dev2" +version = "0.5.3.dev3" description = "This repository includes Swarmauri community components." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -88,4 +88,18 @@ python-dotenv = "^1.0.0" requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" -[tool.pytest.ini +[tool.pytest.ini_options] +norecursedirs = ["combined", "scripts"] + +markers = [ + "test: standard test", + "unit: Unit tests", + "integration: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" From 66e42b824a1a901941772d0c62d681233d780e91 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 05:59:28 -0600 Subject: [PATCH 26/42] comm - Update pyproject.toml --- pkgs/community/pyproject.toml | 77 ++++++++++++++++------------------- 1 file changed, 35 insertions(+), 42 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index f0ff82388..71dfdb2b0 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -15,65 +15,58 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.10,<3.13" +captcha = "*" +chromadb = { version = "*", optional = true } +duckdb = { version = "*", optional = true } +folium = { version = "*", optional = true } +gensim = { version = "*", optional = true } +gradio = { version = "*", optional = true } +leptonai = { version = "0.22.0", optional = true } +neo4j = { version = "*", optional = true } +nltk = { version = "*", optional = true } +pandas = "*" +psutil = { version = "*", optional = true } +pygithub = { version = "*", optional = true } +python-dotenv = "*" +qrcode = { version = "*", optional = true } +redis = { version = "^4.0", optional = true } swarmauri = "==0.5.3.dev1" +textstat = { version = "*", optional = true } +transformers = { version = ">=4.45.0", optional = true } typing_extensions = "*" -matplotlib = { version = ">=3.9.2", optional = true } -nltk = { version = "^3.9.1", optional = true } -gensim = { version = "==4.3.3", optional = true } -transformers = { version = "^4.45.0", optional = true } -spacy = { version = ">=3.0.0,<=3.8.2", optional = true } -textblob = { version = "^0.18.0", optional = true } -torch = { version = "^2.5.0", optional = true } -leptonai = { version = "==0.22.0", optional = true } -redis = { version = "^4.0", optional = true } -pinecone-client = { version = ">=2.0.0", optional = true, extras = ["grpc"] } -textstat = { version = "^0.7.0", optional = true } -annoy = { version = "^1.17.1", optional = true } -folium = { version = "^0.14.0", optional = true } -duckdb = { version = "^0.8.1", optional = true } -neo4j = { version = "^5.12.0", optional = true } -chromadb = { version = "^0.4.4", optional = true } -qdrant_client = { version = "^1.2.0", optional = true } -weaviate-client = { version = "^3.12.0", optional = true } -PyPDF2 = { version = "^3.0.0", optional = true } -pymupdf = { version = "^1.21.1", optional = true } -pypdftk = { version = "^0.4.1", optional = true } -psutil = { version = "^5.9.5", optional = true } -qrcode = { version = "^7.4.0", optional = true } -pygithub = { version = "^1.58.0", optional = true } -gradio = { version = "^3.40.0", optional = true } -google-generativeai = { version = "^0.5.0", optional = true } -openai = { version = "^0.27.0", optional = true } -scipy = { version = "^1.11.0", optional = true } -tiktoken = { version = "^0.3.0", optional = true } +tiktoken = { 
version = "*", optional = true } +pymupdf = { version = "*", optional = true } +annoy = { version = "*", optional = true } +qdrant_client = { version = "*", optional = true } +weaviate = { version = "*", optional = true } +pinecone-client = { version = "*", optional = true, extras = ["grpc"] } +PyPDF2 = { version = "*", optional = true } +pypdftk = { version = "*", optional = true } +weaviate-client = { version = "*", optional = true } +protobuf = { version = "^3.20.0", optional = true } [tool.poetry.extras] # Grouped optional dependencies -nlp = ["nltk", "textblob", "textstat", "gensim"] +nlp = ["nltk", "gensim", "textstat"] ml_toolkits = ["transformers", "annoy"] -visualization = ["folium", "matplotlib"] -storage = ["redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client"] +visualization = ["folium"] +storage = ["redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate", "pinecone-client"] document_processing = ["PyPDF2", "pymupdf", "pypdftk"] cloud_integration = ["psutil", "qrcode", "pygithub"] -spacy = ["spacy"] -transformers = ["transformers"] -torch = ["torch"] gradio = ["gradio"] -model_clients = ["leptonai", "google-generativeai", "openai"] +model_clients = ["leptonai"] tiktoken = ["tiktoken"] # Full installation full = [ "nltk", "gensim", "textstat", "transformers", "annoy", - "folium", "matplotlib", - "redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate-client", "pinecone-client", + "folium", + "redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate", "pinecone-client", "PyPDF2", "pymupdf", "pypdftk", "psutil", "qrcode", "pygithub", - "scipy", "spacy", - "torch", "gradio", - "leptonai", "google-generativeai", "openai", + "leptonai", "tiktoken" ] @@ -82,7 +75,7 @@ flake8 = "^7.0" pytest = "^8.0" pytest-asyncio = ">=0.24.0" pytest-xdist = "^3.6.1" -python-dotenv = "^1.0.0" +python-dotenv = "*" [build-system] requires = ["poetry-core>=1.0.0"] From 4ddac14aae565d91f5217bcafe54f2d38fa274a2 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 06:01:53 -0600 Subject: [PATCH 27/42] Update increment_version_dev.yaml --- .github/workflows/increment_version_dev.yaml | 71 ++++++++------------ 1 file changed, 29 insertions(+), 42 deletions(-) diff --git a/.github/workflows/increment_version_dev.yaml b/.github/workflows/increment_version_dev.yaml index 551b83093..2a9736477 100644 --- a/.github/workflows/increment_version_dev.yaml +++ b/.github/workflows/increment_version_dev.yaml @@ -27,49 +27,36 @@ jobs: find . 
-name "pyproject.toml" | while read -r pyproject; do echo "Processing $pyproject" - # Extract current version - CURRENT_VERSION=$(python -c " -import tomlkit -with open('$pyproject', 'r') as f: - data = tomlkit.parse(f.read()) - print(data['tool']['poetry']['version']) -") + # Extract current version + CURRENT_VERSION=$(python -c " + import tomlkit + try: + with open('$pyproject', 'r') as f: + data = tomlkit.parse(f.read()) + print(data['tool']['poetry']['version']) + except Exception as e: + print('Error reading version:', e) + exit(1) + ") + + if [ -z "$CURRENT_VERSION" ]; then + echo "Error: Could not extract the current version from $pyproject" + exit 1 + fi + + # Increment version + BASE_VERSION=$(echo "$CURRENT_VERSION" | sed -E 's/(.*)-dev.*/\1/') + DEV_PART=$(echo "$CURRENT_VERSION" | grep -oE 'dev[0-9]+$' | grep -oE '[0-9]+') + + if [ -z "$DEV_PART" ]; then + DEV_PART=0 + fi + + NEW_DEV_PART=$((DEV_PART + 1)) + NEW_VERSION="${BASE_VERSION}-dev${NEW_DEV_PART}" + + echo "Updating version from $CURRENT_VERSION to $NEW_VERSION" - # Increment version - BASE_VERSION=$(echo "$CURRENT_VERSION" | sed -E 's/(.*)-dev.*/\1/') - DEV_PART=$(echo "$CURRENT_VERSION" | grep -oE 'dev[0-9]+$' | grep -oE '[0-9]+') - NEW_DEV_PART=$((DEV_PART + 1)) - NEW_VERSION="${BASE_VERSION}-dev${NEW_DEV_PART:-1}" - - echo "Updating version from $CURRENT_VERSION to $NEW_VERSION" - - # Update version in pyproject.toml - python -c " -import tomlkit -with open('$pyproject', 'r') as f: - data = tomlkit.parse(f.read()) -data['tool']['poetry']['version'] = '$NEW_VERSION' -with open('$pyproject', 'w') as f: - f.write(tomlkit.dumps(data)) -" - - # Update dependencies starting with 'swarmauri' - python -c " -import tomlkit -with open('$pyproject', 'r') as f: - data = tomlkit.parse(f.read()) -dependencies = data['tool']['poetry'].get('dependencies', {}) -for dep, version in dependencies.items(): - if dep.startswith('swarmauri') and isinstance(version, str): - base_version = version.split('-dev')[0] - dev_part = int(version.split('-dev')[-1]) if '-dev' in version else 0 - new_version = f'{base_version}-dev{dev_part + 1}' - dependencies[dep] = new_version -data['tool']['poetry']['dependencies'] = dependencies -with open('$pyproject', 'w') as f: - f.write(tomlkit.dumps(data)) -" - done - name: Commit changes run: | From 28c388c590d312300725c1cc26244863a90bfb6e Mon Sep 17 00:00:00 2001 From: michaeldecent2 <111002205+MichaelDecent@users.noreply.github.com> Date: Thu, 21 Nov 2024 14:19:48 +0100 Subject: [PATCH 28/42] comm - Update dependencies in pyproject.toml --- pkgs/community/pyproject.toml | 56 +++++++++++++++++------------------ 1 file changed, 27 insertions(+), 29 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index 71dfdb2b0..b33cc9ffb 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -15,43 +15,41 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.10,<3.13" -captcha = "*" -chromadb = { version = "*", optional = true } -duckdb = { version = "*", optional = true } -folium = { version = "*", optional = true } -gensim = { version = "*", optional = true } -gradio = { version = "*", optional = true } -leptonai = { version = "0.22.0", optional = true } -neo4j = { version = "*", optional = true } -nltk = { version = "*", optional = true } -pandas = "*" -psutil = { version = "*", optional = true } -pygithub = { version = "*", optional = true } -python-dotenv = "*" -qrcode = { version = "*", optional = true } +captcha = "^0.6.0" +chromadb = { version 
= "^0.5.17", optional = true } +duckdb = { version = "^1.1.1", optional = true } +folium = { version = "^0.18.0", optional = true } +gensim = { version = "^4.3.3", optional = true } +gradio = { version = "^5.4.0", optional = true } +leptonai = { version = "^0.22.0", optional = true } +neo4j = { version = "^5.25.0", optional = true } +nltk = { version = "^3.9.1", optional = true } +pandas = "^2.2.3" +psutil = { version = "^6.1.0", optional = true } +pygithub = { version = "^2.4.0", optional = true } +qrcode = { version = "^8.0", optional = true } redis = { version = "^4.0", optional = true } swarmauri = "==0.5.3.dev1" -textstat = { version = "*", optional = true } +textstat = { version = "^0.7.4", optional = true } transformers = { version = ">=4.45.0", optional = true } -typing_extensions = "*" -tiktoken = { version = "*", optional = true } -pymupdf = { version = "*", optional = true } -annoy = { version = "*", optional = true } -qdrant_client = { version = "*", optional = true } -weaviate = { version = "*", optional = true } -pinecone-client = { version = "*", optional = true, extras = ["grpc"] } -PyPDF2 = { version = "*", optional = true } -pypdftk = { version = "*", optional = true } -weaviate-client = { version = "*", optional = true } -protobuf = { version = "^3.20.0", optional = true } +typing_extensions = "^4.12.2" +tiktoken = { version = "^0.8.0", optional = true } +PyMuPDF = { version = "^1.24.12", optional = true } +annoy = { version = "^1.17.3", optional = true } +qdrant-client = { version = "^1.12.0", optional = true } +pinecone-client = { version = "^5.0.1", optional = true, extras = ["grpc"] } +pypdf = { version = "^5.0.1", optional = true } +pypdftk = { version = "^0.5", optional = true } +weaviate-client = { version = "^4.9.2", optional = true } +#protobuf = { version = "^3.20.0", optional = true } [tool.poetry.extras] # Grouped optional dependencies nlp = ["nltk", "gensim", "textstat"] ml_toolkits = ["transformers", "annoy"] visualization = ["folium"] -storage = ["redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate", "pinecone-client"] -document_processing = ["PyPDF2", "pymupdf", "pypdftk"] +storage = ["redis", "duckdb", "neo4j", "chromadb", "qdrant-client", "weaviate", "pinecone-client"] +document_processing = ["pypdf", "PyMuPDF", "pypdftk"] cloud_integration = ["psutil", "qrcode", "pygithub"] gradio = ["gradio"] model_clients = ["leptonai"] @@ -63,7 +61,7 @@ full = [ "transformers", "annoy", "folium", "redis", "duckdb", "neo4j", "chromadb", "qdrant_client", "weaviate", "pinecone-client", - "PyPDF2", "pymupdf", "pypdftk", + "pypdf", "PyMuPDF", "pypdftk", "psutil", "qrcode", "pygithub", "gradio", "leptonai", From fb2bb4f889f5a2a893b887062151f02124ddcfc9 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:48:24 -0600 Subject: [PATCH 29/42] swarm - Update pyproject.toml --- pkgs/swarmauri/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index 27656153a..ed5b5e509 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -15,7 +15,7 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.10,<3.13" -swarmauri_core = "==0.5.2" +swarmauri_core = "==0.5.3.dev2" toml = "^0.10.2" httpx = "^0.27.2" joblib = "^1.4.0" From 940b74ffa923c18a35f96f19e53a749d3f121914 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:49:14 -0600 
Subject: [PATCH 30/42] core - Update pyproject.toml --- pkgs/core/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/core/pyproject.toml b/pkgs/core/pyproject.toml index 4f5283c21..dd96deb27 100644 --- a/pkgs/core/pyproject.toml +++ b/pkgs/core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-core" -version = "0.5.2" +version = "0.5.3.dev2" description = "This repository includes core interfaces for the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" From 7c486db7a55fc4cec65a32dd8160d737cb4c5459 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:49:55 -0600 Subject: [PATCH 31/42] core - Update pyproject.toml --- pkgs/core/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/core/pyproject.toml b/pkgs/core/pyproject.toml index dd96deb27..887619499 100644 --- a/pkgs/core/pyproject.toml +++ b/pkgs/core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-core" -version = "0.5.3.dev2" +version = "0.5.3.dev3" description = "This repository includes core interfaces for the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" From 4a6d9d6ced8659ba1360be79b8ffc48af496eecc Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:50:05 -0600 Subject: [PATCH 32/42] swarm - Update pyproject.toml --- pkgs/swarmauri/pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index ed5b5e509..4db045b85 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri" -version = "0.5.3.dev2" +version = "0.5.3.dev3" description = "This repository includes base classes, concrete generics, and concrete standard components within the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -15,7 +15,7 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.10,<3.13" -swarmauri_core = "==0.5.3.dev2" +swarmauri_core = "==0.5.3.dev3" toml = "^0.10.2" httpx = "^0.27.2" joblib = "^1.4.0" From a1e0967e5061f931872e6064c417d7c28fdd76f7 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:50:21 -0600 Subject: [PATCH 33/42] comm - Update pyproject.toml --- pkgs/community/pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index b33cc9ffb..e500a0d0f 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -4,7 +4,7 @@ version = "0.5.3.dev3" description = "This repository includes Swarmauri community components." 
authors = ["Jacob Stewart "] license = "Apache-2.0" -readme = "README.md" +readme = "README.md"e repository = "http://github.com/swarmauri/swarmauri-sdk" classifiers = [ "License :: OSI Approved :: Apache Software License", @@ -29,7 +29,7 @@ psutil = { version = "^6.1.0", optional = true } pygithub = { version = "^2.4.0", optional = true } qrcode = { version = "^8.0", optional = true } redis = { version = "^4.0", optional = true } -swarmauri = "==0.5.3.dev1" +swarmauri = "==0.5.3.dev3" textstat = { version = "^0.7.4", optional = true } transformers = { version = ">=4.45.0", optional = true } typing_extensions = "^4.12.2" From b015b32aa724e8d63be427fbb27f1168acef206c Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:50:39 -0600 Subject: [PATCH 34/42] Update pyproject.toml --- pkgs/experimental/pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/experimental/pyproject.toml b/pkgs/experimental/pyproject.toml index bea605df3..1d359131a 100644 --- a/pkgs/experimental/pyproject.toml +++ b/pkgs/experimental/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-experimental" -version = "0.5.2" +version = "0.5.3.dev3" description = "This repository includes experimental components." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -15,7 +15,7 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.10,<4.0" -swarmauri = "==0.5.2" +swarmauri = "==0.5.3.dev3" gensim = "*" neo4j = "*" numpy = "*" From b57270f5416c755ffbbb993ba06b0ff07795f995 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:53:55 -0600 Subject: [PATCH 35/42] swarm - Update pyproject.toml --- pkgs/swarmauri/pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index 4db045b85..26317b09f 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri" -version = "0.5.3.dev3" +version = "0.5.3.dev4" description = "This repository includes base classes, concrete generics, and concrete standard components within the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -17,7 +17,7 @@ classifiers = [ python = ">=3.10,<3.13" swarmauri_core = "==0.5.3.dev3" toml = "^0.10.2" -httpx = "^0.27.2" +httpx = "^0.25.0" joblib = "^1.4.0" numpy = "*" pandas = "*" From 9aaa12d1d75690fb17488e5fabcdf75fd38f9151 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:54:13 -0600 Subject: [PATCH 36/42] comm - Update pyproject.toml --- pkgs/community/pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index e500a0d0f..86112c335 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -1,10 +1,10 @@ [tool.poetry] name = "swarmauri-community" -version = "0.5.3.dev3" +version = "0.5.3.dev4" description = "This repository includes Swarmauri community components." 
authors = ["Jacob Stewart "] license = "Apache-2.0" -readme = "README.md"e +readme = "README.md" repository = "http://github.com/swarmauri/swarmauri-sdk" classifiers = [ "License :: OSI Approved :: Apache Software License", @@ -29,7 +29,7 @@ psutil = { version = "^6.1.0", optional = true } pygithub = { version = "^2.4.0", optional = true } qrcode = { version = "^8.0", optional = true } redis = { version = "^4.0", optional = true } -swarmauri = "==0.5.3.dev3" +swarmauri = "==0.5.3.dev4" textstat = { version = "^0.7.4", optional = true } transformers = { version = ">=4.45.0", optional = true } typing_extensions = "^4.12.2" From 6820a6f180b1036e6c6853615a64455bc3e24dc6 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 08:07:13 -0600 Subject: [PATCH 37/42] Update pyproject.toml --- pkgs/swarmauri/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index 26317b09f..14878d8e4 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -17,7 +17,7 @@ classifiers = [ python = ">=3.10,<3.13" swarmauri_core = "==0.5.3.dev3" toml = "^0.10.2" -httpx = "^0.25.0" +httpx = "^0.27.0" joblib = "^1.4.0" numpy = "*" pandas = "*" From c70f09f55f5b9406cf33bbe9373bc3ff5c17a72b Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 08:07:28 -0600 Subject: [PATCH 38/42] swarm - Update pyproject.toml --- pkgs/swarmauri/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index 14878d8e4..23ad0e37f 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri" -version = "0.5.3.dev4" +version = "0.5.3.dev5" description = "This repository includes base classes, concrete generics, and concrete standard components within the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" From 1257f30add771bf74e46a1ce28714f9e6880d378 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 08:07:46 -0600 Subject: [PATCH 39/42] swarm - Update pyproject.toml --- pkgs/community/pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/community/pyproject.toml b/pkgs/community/pyproject.toml index 86112c335..256adb6da 100644 --- a/pkgs/community/pyproject.toml +++ b/pkgs/community/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-community" -version = "0.5.3.dev4" +version = "0.5.3.dev5" description = "This repository includes Swarmauri community components." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -29,7 +29,7 @@ psutil = { version = "^6.1.0", optional = true } pygithub = { version = "^2.4.0", optional = true } qrcode = { version = "^8.0", optional = true } redis = { version = "^4.0", optional = true } -swarmauri = "==0.5.3.dev4" +swarmauri = "==0.5.3.dev5" textstat = { version = "^0.7.4", optional = true } transformers = { version = ">=4.45.0", optional = true } typing_extensions = "^4.12.2" From d7bc92ad7cddc85542e2306a61275fbae1b4108f Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 08:08:05 -0600 Subject: [PATCH 40/42] exp - Update pyproject.toml --- pkgs/experimental/pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/experimental/pyproject.toml b/pkgs/experimental/pyproject.toml index 1d359131a..78339dbbc 100644 --- a/pkgs/experimental/pyproject.toml +++ b/pkgs/experimental/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-experimental" -version = "0.5.3.dev3" +version = "0.5.3.dev5" description = "This repository includes experimental components." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -15,7 +15,7 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.10,<4.0" -swarmauri = "==0.5.3.dev3" +swarmauri = "==0.5.3.dev5" gensim = "*" neo4j = "*" numpy = "*" From d44a15dbfbbc430f8e23c38457f19b3c26adf589 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 08:13:36 -0600 Subject: [PATCH 41/42] cicd - Update sequence_publish.yaml --- .github/workflows/sequence_publish.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sequence_publish.yaml b/.github/workflows/sequence_publish.yaml index 23a88d990..f219023f4 100644 --- a/.github/workflows/sequence_publish.yaml +++ b/.github/workflows/sequence_publish.yaml @@ -93,7 +93,7 @@ jobs: - uses: actions/checkout@v4 - name: Wait for swarmauri - run: sleep 60 + run: sleep 120 - name: Set up Python 3.12 uses: actions/setup-python@v5 From 51c1a44fc6342d374c9b657f18844a6b99a471a7 Mon Sep 17 00:00:00 2001 From: cobycloud <25079070+cobycloud@users.noreply.github.com> Date: Thu, 21 Nov 2024 08:44:44 -0600 Subject: [PATCH 42/42] exp - [Update pyproject.toml --- pkgs/experimental/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/experimental/pyproject.toml b/pkgs/experimental/pyproject.toml index 78339dbbc..2bcdb3b7f 100644 --- a/pkgs/experimental/pyproject.toml +++ b/pkgs/experimental/pyproject.toml @@ -14,7 +14,7 @@ classifiers = [ ] [tool.poetry.dependencies] -python = ">=3.10,<4.0" +python = ">=3.10,<3.13" swarmauri = "==0.5.3.dev5" gensim = "*" neo4j = "*"