diff --git a/.github/workflows.deprecated/create_component.yaml b/.github/deprecated_flows/create_component.yaml similarity index 100% rename from .github/workflows.deprecated/create_component.yaml rename to .github/deprecated_flows/create_component.yaml diff --git a/.github/workflows.deprecated/force_publish.yaml b/.github/deprecated_flows/force_publish.yaml similarity index 100% rename from .github/workflows.deprecated/force_publish.yaml rename to .github/deprecated_flows/force_publish.yaml diff --git a/.github/workflows.deprecated/increment_version_dev.yaml b/.github/deprecated_flows/increment_version_dev.yaml similarity index 100% rename from .github/workflows.deprecated/increment_version_dev.yaml rename to .github/deprecated_flows/increment_version_dev.yaml diff --git a/.github/workflows.deprecated/parse_feature_request.yaml b/.github/deprecated_flows/parse_feature_request.yaml similarity index 100% rename from .github/workflows.deprecated/parse_feature_request.yaml rename to .github/deprecated_flows/parse_feature_request.yaml diff --git a/.github/workflows.deprecated/sequence_publish.yaml b/.github/deprecated_flows/sequence_publish.yaml similarity index 100% rename from .github/workflows.deprecated/sequence_publish.yaml rename to .github/deprecated_flows/sequence_publish.yaml diff --git a/.github/workflows.deprecated/test_changed_files.yaml b/.github/deprecated_flows/test_changed_files.yaml similarity index 100% rename from .github/workflows.deprecated/test_changed_files.yaml rename to .github/deprecated_flows/test_changed_files.yaml diff --git a/.github/workflows.deprecated/test_dev.yml b/.github/deprecated_flows/test_dev.yml similarity index 100% rename from .github/workflows.deprecated/test_dev.yml rename to .github/deprecated_flows/test_dev.yml diff --git a/.github/workflows.deprecated/v0.6.0_install-and-build-packages.yml b/.github/deprecated_flows/v0.6.0_install-and-build-packages.yml similarity index 100% rename from 
.github/workflows.deprecated/v0.6.0_install-and-build-packages.yml rename to .github/deprecated_flows/v0.6.0_install-and-build-packages.yml diff --git a/.github/workflows.deprecated/v0.6.0_install-and-publish-packages(dep).yaml b/.github/deprecated_flows/v0.6.0_install-and-publish-packages(dep).yaml similarity index 100% rename from .github/workflows.deprecated/v0.6.0_install-and-publish-packages(dep).yaml rename to .github/deprecated_flows/v0.6.0_install-and-publish-packages(dep).yaml diff --git a/.github/workflows.deprecated/v0.6.0_install-and-publish-packages.yaml b/.github/deprecated_flows/v0.6.0_install-and-publish-packages.yaml similarity index 100% rename from .github/workflows.deprecated/v0.6.0_install-and-publish-packages.yaml rename to .github/deprecated_flows/v0.6.0_install-and-publish-packages.yaml diff --git a/.github/workflows.deprecated/v0.6.0_install_and_publish(dep).yaml b/.github/deprecated_flows/v0.6.0_install_and_publish(dep).yaml similarity index 100% rename from .github/workflows.deprecated/v0.6.0_install_and_publish(dep).yaml rename to .github/deprecated_flows/v0.6.0_install_and_publish(dep).yaml diff --git a/.github/workflows.deprecated/v0.6.0_install_and_publish.yaml b/.github/deprecated_flows/v0.6.0_install_and_publish.yaml similarity index 100% rename from .github/workflows.deprecated/v0.6.0_install_and_publish.yaml rename to .github/deprecated_flows/v0.6.0_install_and_publish.yaml diff --git a/.github/workflows.deprecated/v0.6.0_install_packages.yaml b/.github/deprecated_flows/v0.6.0_install_packages.yaml similarity index 100% rename from .github/workflows.deprecated/v0.6.0_install_packages.yaml rename to .github/deprecated_flows/v0.6.0_install_packages.yaml diff --git a/.github/workflows/v0.6.0_prepare_install_publish_single.yaml b/.github/workflows/v0.6.0_prepare_install_publish_single.yaml index eee09fd01..e8f18ed46 100644 --- a/.github/workflows/v0.6.0_prepare_install_publish_single.yaml +++ 
b/.github/workflows/v0.6.0_prepare_install_publish_single.yaml @@ -3,10 +3,9 @@ name: 0.6.0 - Single (Prepare, Install, Publish) on: workflow_dispatch: inputs: - soliloquy_file: + pyproject_directory: - description: "File to use for install, prepare, and release steps" + description: "Directory under pkgs/ containing the pyproject.toml used for the install, prepare, and release steps" required: true - default: "pyproject.toml" bump_type: description: "Bump type (patch or finalize) for the prepare phase" required: false @@ -14,7 +13,7 @@ on: commit_msg: description: "Commit message for the prepare phase" required: false - default: "chore: auto increment packages" + default: "chore: auto increment package" concurrency: group: dev-branch-workflow-group @@ -54,8 +53,8 @@ jobs: - name: Run install phase run: | source $UNIQUE_VENV_PATH/bin/activate - cd pkgs - soliloquy install --file "${{ github.event.inputs.soliloquy_file }}" + cd "pkgs/${{ github.event.inputs.pyproject_directory }}" + soliloquy install --file "pyproject.toml" - name: Pip freeze run: | @@ -68,9 +67,9 @@ jobs: source $UNIQUE_VENV_PATH/bin/activate git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - cd pkgs + cd "pkgs/${{ github.event.inputs.pyproject_directory }}" soliloquy prepare \ - --file "${{ github.event.inputs.soliloquy_file }}" \ + --file "pyproject.toml" \ --bump "${{ github.event.inputs.bump_type }}" \ --commit-msg "${{ github.event.inputs.commit_msg }}" \ --lint-fix @@ -85,9 +84,9 @@ jobs: - name: Run release phase run: | source $UNIQUE_VENV_PATH/bin/activate - cd pkgs + cd "pkgs/${{ github.event.inputs.pyproject_directory }}" soliloquy release \ - --file "${{ github.event.inputs.soliloquy_file }}" \ + --file "pyproject.toml" \ --test-mode single \ --num-workers 4 \ --no-cleanup \ diff --git a/.github/workflows/v0.6.0_prepare_install_validate.yaml b/.github/workflows/v0.6.0_prepare_install_validate.yaml index 54bb7c584..3ab43d3b6 100644 --- a/.github/workflows/v0.6.0_prepare_install_validate.yaml +++ 
b/.github/workflows/v0.6.0_prepare_install_validate.yaml @@ -1,8 +1,7 @@ name: 0.6.0 - Mono (Prepare, Install, Validate) on: - push: - branches: [ "mono/dev" ] + workflow_dispatch: concurrency: group: dev-branch-workflow-group diff --git a/.github/workflows/v0.6.0_prepare_install_validate_single.yaml b/.github/workflows/v0.6.0_prepare_install_validate_single.yaml index 1db5ff957..06a11ec41 100644 --- a/.github/workflows/v0.6.0_prepare_install_validate_single.yaml +++ b/.github/workflows/v0.6.0_prepare_install_validate_single.yaml @@ -3,10 +3,9 @@ name: 0.6.0 - Single (Prepare, Install, Validate) on: workflow_dispatch: inputs: - soliloquy_file: + pyproject_directory: - description: "File to use for install, prepare, and release steps" + description: "Directory under pkgs/ containing the pyproject.toml used for the install, prepare, and validate steps" required: true - default: "pyproject.toml" bump_type: description: "Bump type (patch or finalize) for the prepare phase" required: false @@ -14,7 +13,11 @@ on: commit_msg: description: "Commit message for the prepare phase" required: false - default: "chore: auto increment packages" + default: "chore: auto increment package" + test_mode: + description: "Test mode (each, single, monorepo) for the validate phase" + required: false + default: "single" concurrency: group: dev-branch-workflow-group @@ -55,8 +58,8 @@ jobs: - name: Run install phase run: | source $UNIQUE_VENV_PATH/bin/activate - cd pkgs - soliloquy install --file "${{ github.event.inputs.soliloquy_file }}" + cd "pkgs/${{ github.event.inputs.pyproject_directory }}" + soliloquy install --file "pyproject.toml" - name: Pip freeze run: | @@ -69,9 +72,9 @@ jobs: source $UNIQUE_VENV_PATH/bin/activate git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - cd pkgs + cd "pkgs/${{ github.event.inputs.pyproject_directory }}" soliloquy prepare \ - --file "${{ github.event.inputs.soliloquy_file }}" \ + --file "pyproject.toml" \ --bump "${{ github.event.inputs.bump_type }}" \ --commit-msg "${{ github.event.inputs.commit_msg }}" \ 
--lint-fix @@ -86,7 +89,7 @@ jobs: - name: Run validate phase run: | source $UNIQUE_VENV_PATH/bin/activate - cd pkgs - soliloquy validate --file "${{ github.event.inputs.soliloquy_file }}" \ - --test-mode each \ + cd "pkgs/${{ github.event.inputs.pyproject_directory }}" + soliloquy validate --file "pyproject.toml" \ + --test-mode "${{ github.event.inputs.test_mode }}" \ --num-workers 4 diff --git a/.github/workflows/v0.6.0_validate_changed_files.yaml b/.github/workflows/v0.6.0_validate_changed_files.yaml index 3054db6bf..72aca6b9e 100644 --- a/.github/workflows/v0.6.0_validate_changed_files.yaml +++ b/.github/workflows/v0.6.0_validate_changed_files.yaml @@ -78,7 +78,7 @@ jobs: PACKAGE_ROOT_PATH="${PACKAGE_ROOT#pkgs/}" # If the changed file itself is a test... - if echo "$FILE" | grep -qE '/tests/.*_test\.py$'; then + if echo "$FILE" | grep -qE '/tests/.*(test_.*\.py|.*_test\.py)$'; then # Keep test file path relative to the package root # For example, if FILE="pkgs/community/swarmauri_toolkit_github/tests/unit/test_xyz.py", # then RELATIVE_TEST_FILE="tests/unit/test_xyz.py" @@ -91,7 +91,7 @@ jobs: if [ -d "$TEST_DIR" ]; then # Find any test file that includes the changed component name - MATCHING_TEST_FILES=$(find "$TEST_DIR" -type f -iname "*${COMPONENT_NAME}*_test.py") + MATCHING_TEST_FILES=$(find "$TEST_DIR" -type f \( -iname "*${COMPONENT_NAME}_test.py" -o -iname "test_${COMPONENT_NAME}.py" \)) for TEST_FILE in $MATCHING_TEST_FILES; do RELATIVE_TEST_FILE=$(echo "$TEST_FILE" | sed "s|^$PACKAGE_ROOT/||") PACKAGE_TEST_MAP["$PACKAGE_ROOT_PATH"]="${PACKAGE_TEST_MAP[$PACKAGE_ROOT_PATH]} $RELATIVE_TEST_FILE" @@ -147,7 +147,7 @@ jobs: source $UNIQUE_VENV_PATH/bin/activate python -m pip install --upgrade pip pip install poetry - pip install soliloquy=="0.1.4" + pip install soliloquy=="0.1.5.dev1" - name: Soliloquy Install run: | @@ -180,4 +180,4 @@ jobs: # Move into the directory that has pyproject.toml cd "pkgs/$PKG_PATH" - soliloquy validate -f pyproject.toml 
--test-mode each --num-workers 4 + soliloquy validate -f pyproject.toml --test-mode single --num-workers 4 --required-passed ge:99 diff --git a/.gitignore b/.gitignore index 42ae61608..c9030bb21 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,4 @@ env /combined .venv_core* *.obj +pytest_results.json diff --git a/STYLE_GUIDE.md b/STYLE_GUIDE.md new file mode 100644 index 000000000..afdc9789b --- /dev/null +++ b/STYLE_GUIDE.md @@ -0,0 +1,146 @@ +# SDK Style Guide + +This document outlines the coding and documentation standards for our SDK. Our goal is to ensure a consistent, clear, and maintainable codebase. We adhere to established Python standards (PEP 8 and PEP 257) and specifically utilize spaCy’s docstring style. + +--- + +## Table of Contents + +- [General Coding Standards](#general-coding-standards) +- [Docstring Conventions](#docstring-conventions) + - [Overview](#overview) + - [Structure](#structure) + - [Example](#example) +- [Contributing](#contributing) +- [References](#references) + +--- + +## General Coding Standards + +- **PEP 8 Compliance:** + Follow PEP 8 for overall code style, including naming conventions, line lengths, and whitespace usage. + +- **Readable and Maintainable Code:** + Write clear, self-explanatory code. Refactor and comment where necessary to improve clarity. + +- **Consistent Formatting:** + Use our style guide and formatting tools (e.g., linters, formatters) to maintain consistency across the codebase. + +--- + +## Docstring Conventions + +We follow spaCy’s docstring style to ensure our documentation is clear and consistent. + +### Overview + +- **Triple Double Quotes:** + Use `"""` for all docstrings. + +- **Placement:** + Place the docstring immediately after the function, method, class, or module definition. + +- **Imperative Mood:** + Write summaries in the imperative mood (e.g., "Return", "Process", "Compute"). + +### Structure + +1. **One-Line Summary:** + - A concise description of what the function/class/module does. 
+ +2. **Blank Line:** + - Insert a blank line after the summary if the docstring contains additional detail. + +3. **Extended Description:** + - Provide any extra information necessary to understand the code’s purpose or behavior. + +4. **Parameter Section (Args):** + - List each parameter, its type, and a brief description. + +5. **Returns Section:** + - Describe the return type and the meaning of the returned value. + +6. **Raises Section (if applicable):** + - List any exceptions that might be raised by the function. + +### Example + +Below is an example demonstrating the spaCy docstring style at the method level: + +```python +def process_text(text: str) -> Doc: + """ + Process a text string and return a spaCy Doc object. + + This function tokenizes and annotates the input text using spaCy's NLP pipeline. + + Args: + text (str): The input text to be processed. + + Returns: + Doc: The processed spaCy Doc object. + + Raises: + ValueError: If the input text is empty. + """ + if not text: + raise ValueError("Input text must not be empty.") + # Processing code goes here +``` + +Below is an example demonstrating the spaCy docstring style at the module level: + +```python +""" +text_utils.py + +This module provides utilities for processing text data. It leverages spaCy's NLP +pipeline to tokenize, annotate, and analyze text, offering helper classes and functions +to simplify common text processing tasks. +""" + +import spacy +``` + +Below is an example demonstrating spaCy docstring style at the class level: + +```python +class TextProcessor: + """ + A class for processing and analyzing text data using spaCy. + + This class provides methods to tokenize, lemmatize, and extract named entities from text. + It utilizes spaCy's NLP pipeline to annotate the text and offers helper methods for + common text processing tasks. + + Attributes: + nlp (spacy.language.Language): The spaCy NLP model used for processing text. 
+ """ + + def __init__(self, model: str = "en_core_web_sm"): + """ + Initialize the TextProcessor with the specified spaCy model. + + Args: + model (str): The name of the spaCy model to load (default is "en_core_web_sm"). + """ + self.nlp = spacy.load(model) + + def process(self, text: str): + """ + Process the input text and return the annotated spaCy Doc object. + + Args: + text (str): The input text to process. + + Returns: + spacy.tokens.Doc: The processed document with tokens, entities, and annotations. + + Raises: + ValueError: If the input text is empty. + """ + if not text: + raise ValueError("Input text must not be empty.") + return self.nlp(text) +``` \ No newline at end of file diff --git a/pkgs/standards/swarmauri_standard/LICENSE b/docs/LICENSE similarity index 100% rename from pkgs/standards/swarmauri_standard/LICENSE rename to docs/LICENSE diff --git a/docs/docs/foundation/index.md b/docs/docs/foundation/index.md new file mode 100644 index 000000000..baf836498 --- /dev/null +++ b/docs/docs/foundation/index.md @@ -0,0 +1,2 @@ +# Foundation + diff --git a/docs/docs/index.md b/docs/docs/index.md new file mode 100644 index 000000000..513d2fb2a --- /dev/null +++ b/docs/docs/index.md @@ -0,0 +1,29 @@ +--- +title: "Home" +hide_posts: true +--- +# Home + +Welcome to the official documentation for our Python monorepo! This site is built with [MkDocs](https://www.mkdocs.org/) and styled with [Material for MkDocs Insiders](https://squidfunk.github.io/mkdocs-material-insiders/). Here you'll find everything you need to get started, understand our guides, and explore our API references. + +## What You'll Find Here + +- **About**: Learn more about the project and its goals. +- **Guide**: Step-by-step instructions on how to install, configure, and use the various tools and modules. +- **API**: Detailed documentation for our Python modules. 
+ +## Getting Started + +If you're new to the project, we recommend starting with the [About](about.md) page for an overview of our objectives. Then, head over to the [Guide](guide/installation.md) to learn how to set up and use our software. + +## Contributing + +We welcome contributions to the documentation! If you'd like to help improve the docs, please review our [Contributing Guidelines](CONTRIBUTING.md) to get started. + +## Feedback + +Have questions or feedback? Feel free to open an issue or reach out on our community channels. + +--- + +Enjoy your exploration, and happy documenting! diff --git a/docs/docs/news/.authors.yml b/docs/docs/news/.authors.yml new file mode 100644 index 000000000..040dcb9e4 --- /dev/null +++ b/docs/docs/news/.authors.yml @@ -0,0 +1,5 @@ +authors: + cobycloud: + name: Jacob Stewart + description: Creator + avatar: https://github.com/cobycloud.png \ No newline at end of file diff --git a/docs/docs/news/index.md b/docs/docs/news/index.md new file mode 100644 index 000000000..c58f16c50 --- /dev/null +++ b/docs/docs/news/index.md @@ -0,0 +1,2 @@ +# Blog + diff --git a/docs/docs/news/posts/hello-world.md b/docs/docs/news/posts/hello-world.md new file mode 100644 index 000000000..d3d1ecee2 --- /dev/null +++ b/docs/docs/news/posts/hello-world.md @@ -0,0 +1,11 @@ +--- +date: 2024-01-31 +tags: + - Foo + - Bar +authors: + - cobycloud +--- + + +# Hello World \ No newline at end of file diff --git a/docs/docs/src/swarmauri_base/agents.md b/docs/docs/src/swarmauri_base/agents.md new file mode 100644 index 000000000..0082bb7f2 --- /dev/null +++ b/docs/docs/src/swarmauri_base/agents.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.agents` + +::: swarmauri_base.agents + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/agents/AgentBase.md b/docs/docs/src/swarmauri_base/agents/AgentBase.md new file mode 100644 index 
000000000..523ccb81b --- /dev/null +++ b/docs/docs/src/swarmauri_base/agents/AgentBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.agents.AgentBase.AgentBase` + +::: swarmauri_base.agents.AgentBase.AgentBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/agents/AgentConversationMixin.md b/docs/docs/src/swarmauri_base/agents/AgentConversationMixin.md new file mode 100644 index 000000000..88087ffd2 --- /dev/null +++ b/docs/docs/src/swarmauri_base/agents/AgentConversationMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.agents.AgentConversationMixin.AgentConversationMixin` + +::: swarmauri_base.agents.AgentConversationMixin.AgentConversationMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/agents/AgentRetrieveMixin.md b/docs/docs/src/swarmauri_base/agents/AgentRetrieveMixin.md new file mode 100644 index 000000000..61f108380 --- /dev/null +++ b/docs/docs/src/swarmauri_base/agents/AgentRetrieveMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.agents.AgentRetrieveMixin.AgentRetrieveMixin` + +::: swarmauri_base.agents.AgentRetrieveMixin.AgentRetrieveMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/agents/AgentSystemContextMixin.md b/docs/docs/src/swarmauri_base/agents/AgentSystemContextMixin.md new file mode 100644 index 000000000..e372f23b6 --- /dev/null +++ b/docs/docs/src/swarmauri_base/agents/AgentSystemContextMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.agents.AgentSystemContextMixin.AgentSystemContextMixin` + +::: swarmauri_base.agents.AgentSystemContextMixin.AgentSystemContextMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/agents/AgentToolMixin.md b/docs/docs/src/swarmauri_base/agents/AgentToolMixin.md new file mode 100644 index 000000000..ac08ba51b --- /dev/null +++ b/docs/docs/src/swarmauri_base/agents/AgentToolMixin.md @@ -0,0 +1,6 @@ +# Class 
`swarmauri_base.agents.AgentToolMixin.AgentToolMixin` + +::: swarmauri_base.agents.AgentToolMixin.AgentToolMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/agents/AgentVectorStoreMixin.md b/docs/docs/src/swarmauri_base/agents/AgentVectorStoreMixin.md new file mode 100644 index 000000000..39f7d62a7 --- /dev/null +++ b/docs/docs/src/swarmauri_base/agents/AgentVectorStoreMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.agents.AgentVectorStoreMixin.AgentVectorStoreMixin` + +::: swarmauri_base.agents.AgentVectorStoreMixin.AgentVectorStoreMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/chains.md b/docs/docs/src/swarmauri_base/chains.md new file mode 100644 index 000000000..6c280b3e7 --- /dev/null +++ b/docs/docs/src/swarmauri_base/chains.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.chains` + +::: swarmauri_base.chains + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/chains/ChainBase.md b/docs/docs/src/swarmauri_base/chains/ChainBase.md new file mode 100644 index 000000000..e5cc53901 --- /dev/null +++ b/docs/docs/src/swarmauri_base/chains/ChainBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.chains.ChainBase.ChainBase` + +::: swarmauri_base.chains.ChainBase.ChainBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/chains/ChainContextBase.md b/docs/docs/src/swarmauri_base/chains/ChainContextBase.md new file mode 100644 index 000000000..d5274ec88 --- /dev/null +++ b/docs/docs/src/swarmauri_base/chains/ChainContextBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.chains.ChainContextBase.ChainContextBase` + +::: swarmauri_base.chains.ChainContextBase.ChainContextBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/chains/ChainStepBase.md 
b/docs/docs/src/swarmauri_base/chains/ChainStepBase.md new file mode 100644 index 000000000..f88c94321 --- /dev/null +++ b/docs/docs/src/swarmauri_base/chains/ChainStepBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.chains.ChainStepBase.ChainStepBase` + +::: swarmauri_base.chains.ChainStepBase.ChainStepBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/chunkers.md b/docs/docs/src/swarmauri_base/chunkers.md new file mode 100644 index 000000000..41d9b9cf6 --- /dev/null +++ b/docs/docs/src/swarmauri_base/chunkers.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.chunkers` + +::: swarmauri_base.chunkers + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/chunkers/ChunkerBase.md b/docs/docs/src/swarmauri_base/chunkers/ChunkerBase.md new file mode 100644 index 000000000..a955d763e --- /dev/null +++ b/docs/docs/src/swarmauri_base/chunkers/ChunkerBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.chunkers.ChunkerBase.ChunkerBase` + +::: swarmauri_base.chunkers.ChunkerBase.ChunkerBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/control_panels.md b/docs/docs/src/swarmauri_base/control_panels.md new file mode 100644 index 000000000..ab98cff97 --- /dev/null +++ b/docs/docs/src/swarmauri_base/control_panels.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.control_panels` + +::: swarmauri_base.control_panels + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/control_panels/ControlPanelBase.md b/docs/docs/src/swarmauri_base/control_panels/ControlPanelBase.md new file mode 100644 index 000000000..9077d88f4 --- /dev/null +++ b/docs/docs/src/swarmauri_base/control_panels/ControlPanelBase.md @@ -0,0 +1,6 @@ +# Class 
`swarmauri_base.control_panels.ControlPanelBase.ControlPanelBase` + +::: swarmauri_base.control_panels.ControlPanelBase.ControlPanelBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/conversations.md b/docs/docs/src/swarmauri_base/conversations.md new file mode 100644 index 000000000..115b2197d --- /dev/null +++ b/docs/docs/src/swarmauri_base/conversations.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.conversations` + +::: swarmauri_base.conversations + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/conversations/ConversationBase.md b/docs/docs/src/swarmauri_base/conversations/ConversationBase.md new file mode 100644 index 000000000..172efab93 --- /dev/null +++ b/docs/docs/src/swarmauri_base/conversations/ConversationBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.conversations.ConversationBase.ConversationBase` + +::: swarmauri_base.conversations.ConversationBase.ConversationBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/conversations/ConversationSystemContextMixin.md b/docs/docs/src/swarmauri_base/conversations/ConversationSystemContextMixin.md new file mode 100644 index 000000000..8d6c17d8a --- /dev/null +++ b/docs/docs/src/swarmauri_base/conversations/ConversationSystemContextMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.conversations.ConversationSystemContextMixin.ConversationSystemContextMixin` + +::: swarmauri_base.conversations.ConversationSystemContextMixin.ConversationSystemContextMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/dataconnectors.md b/docs/docs/src/swarmauri_base/dataconnectors.md new file mode 100644 index 000000000..48e77fbc1 --- /dev/null +++ b/docs/docs/src/swarmauri_base/dataconnectors.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.dataconnectors` + +::: 
swarmauri_base.dataconnectors + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/dataconnectors/DataConnectorBase.md b/docs/docs/src/swarmauri_base/dataconnectors/DataConnectorBase.md new file mode 100644 index 000000000..0ccd983d9 --- /dev/null +++ b/docs/docs/src/swarmauri_base/dataconnectors/DataConnectorBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.dataconnectors.DataConnectorBase.DataConnectorBase` + +::: swarmauri_base.dataconnectors.DataConnectorBase.DataConnectorBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/distances.md b/docs/docs/src/swarmauri_base/distances.md new file mode 100644 index 000000000..4f5e725b7 --- /dev/null +++ b/docs/docs/src/swarmauri_base/distances.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.distances` + +::: swarmauri_base.distances + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/distances/DistanceBase.md b/docs/docs/src/swarmauri_base/distances/DistanceBase.md new file mode 100644 index 000000000..11c4ae1cf --- /dev/null +++ b/docs/docs/src/swarmauri_base/distances/DistanceBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.distances.DistanceBase.DistanceBase` + +::: swarmauri_base.distances.DistanceBase.DistanceBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/distances/VisionDistanceBase.md b/docs/docs/src/swarmauri_base/distances/VisionDistanceBase.md new file mode 100644 index 000000000..f858a190b --- /dev/null +++ b/docs/docs/src/swarmauri_base/distances/VisionDistanceBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.distances.VisionDistanceBase.VisionDistanceBase` + +::: swarmauri_base.distances.VisionDistanceBase.VisionDistanceBase + options.extra: + show_inheritance: true + diff 
--git a/docs/docs/src/swarmauri_base/document_stores.md b/docs/docs/src/swarmauri_base/document_stores.md new file mode 100644 index 000000000..a31877d73 --- /dev/null +++ b/docs/docs/src/swarmauri_base/document_stores.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.document_stores` + +::: swarmauri_base.document_stores + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/document_stores/DocumentStoreBase.md b/docs/docs/src/swarmauri_base/document_stores/DocumentStoreBase.md new file mode 100644 index 000000000..99a15bc36 --- /dev/null +++ b/docs/docs/src/swarmauri_base/document_stores/DocumentStoreBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.document_stores.DocumentStoreBase.DocumentStoreBase` + +::: swarmauri_base.document_stores.DocumentStoreBase.DocumentStoreBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/document_stores/DocumentStoreRetrieveBase.md b/docs/docs/src/swarmauri_base/document_stores/DocumentStoreRetrieveBase.md new file mode 100644 index 000000000..8052ed53c --- /dev/null +++ b/docs/docs/src/swarmauri_base/document_stores/DocumentStoreRetrieveBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.document_stores.DocumentStoreRetrieveBase.DocumentStoreRetrieveBase` + +::: swarmauri_base.document_stores.DocumentStoreRetrieveBase.DocumentStoreRetrieveBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/documents.md b/docs/docs/src/swarmauri_base/documents.md new file mode 100644 index 000000000..9ef8606d9 --- /dev/null +++ b/docs/docs/src/swarmauri_base/documents.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.documents` + +::: swarmauri_base.documents + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git 
a/docs/docs/src/swarmauri_base/documents/DocumentBase.md b/docs/docs/src/swarmauri_base/documents/DocumentBase.md new file mode 100644 index 000000000..8e757be6e --- /dev/null +++ b/docs/docs/src/swarmauri_base/documents/DocumentBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.documents.DocumentBase.DocumentBase` + +::: swarmauri_base.documents.DocumentBase.DocumentBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/embeddings.md b/docs/docs/src/swarmauri_base/embeddings.md new file mode 100644 index 000000000..f9550db51 --- /dev/null +++ b/docs/docs/src/swarmauri_base/embeddings.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.embeddings` + +::: swarmauri_base.embeddings + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/embeddings/EmbeddingBase.md b/docs/docs/src/swarmauri_base/embeddings/EmbeddingBase.md new file mode 100644 index 000000000..9cbf41bbe --- /dev/null +++ b/docs/docs/src/swarmauri_base/embeddings/EmbeddingBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.embeddings.EmbeddingBase.EmbeddingBase` + +::: swarmauri_base.embeddings.EmbeddingBase.EmbeddingBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/embeddings/VisionEmbeddingBase.md b/docs/docs/src/swarmauri_base/embeddings/VisionEmbeddingBase.md new file mode 100644 index 000000000..ac8106aab --- /dev/null +++ b/docs/docs/src/swarmauri_base/embeddings/VisionEmbeddingBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.embeddings.VisionEmbeddingBase.VisionEmbeddingBase` + +::: swarmauri_base.embeddings.VisionEmbeddingBase.VisionEmbeddingBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/image_gens.md b/docs/docs/src/swarmauri_base/image_gens.md new file mode 100644 index 000000000..30322c88d --- /dev/null +++ b/docs/docs/src/swarmauri_base/image_gens.md 
@@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.image_gens` + +::: swarmauri_base.image_gens + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/image_gens/ImageGenBase.md b/docs/docs/src/swarmauri_base/image_gens/ImageGenBase.md new file mode 100644 index 000000000..52afb62eb --- /dev/null +++ b/docs/docs/src/swarmauri_base/image_gens/ImageGenBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.image_gens.ImageGenBase.ImageGenBase` + +::: swarmauri_base.image_gens.ImageGenBase.ImageGenBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/index.md b/docs/docs/src/swarmauri_base/index.md new file mode 100644 index 000000000..3bcaeca88 --- /dev/null +++ b/docs/docs/src/swarmauri_base/index.md @@ -0,0 +1,3 @@ +# Welcome + +This is the home page. diff --git a/docs/docs/src/swarmauri_base/llms.md b/docs/docs/src/swarmauri_base/llms.md new file mode 100644 index 000000000..26bab53de --- /dev/null +++ b/docs/docs/src/swarmauri_base/llms.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.llms` + +::: swarmauri_base.llms + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/llms/LLMBase.md b/docs/docs/src/swarmauri_base/llms/LLMBase.md new file mode 100644 index 000000000..6985405b5 --- /dev/null +++ b/docs/docs/src/swarmauri_base/llms/LLMBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.llms.LLMBase.LLMBase` + +::: swarmauri_base.llms.LLMBase.LLMBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/measurements.md b/docs/docs/src/swarmauri_base/measurements.md new file mode 100644 index 000000000..93bddc07a --- /dev/null +++ b/docs/docs/src/swarmauri_base/measurements.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.measurements` + +::: 
swarmauri_base.measurements + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/measurements/MeasurementAggregateMixin.md b/docs/docs/src/swarmauri_base/measurements/MeasurementAggregateMixin.md new file mode 100644 index 000000000..5a912ad19 --- /dev/null +++ b/docs/docs/src/swarmauri_base/measurements/MeasurementAggregateMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.measurements.MeasurementAggregateMixin.MeasurementAggregateMixin` + +::: swarmauri_base.measurements.MeasurementAggregateMixin.MeasurementAggregateMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/measurements/MeasurementBase.md b/docs/docs/src/swarmauri_base/measurements/MeasurementBase.md new file mode 100644 index 000000000..ac179c2e2 --- /dev/null +++ b/docs/docs/src/swarmauri_base/measurements/MeasurementBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.measurements.MeasurementBase.MeasurementBase` + +::: swarmauri_base.measurements.MeasurementBase.MeasurementBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/measurements/MeasurementCalculateMixin.md b/docs/docs/src/swarmauri_base/measurements/MeasurementCalculateMixin.md new file mode 100644 index 000000000..4164c6daf --- /dev/null +++ b/docs/docs/src/swarmauri_base/measurements/MeasurementCalculateMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.measurements.MeasurementCalculateMixin.MeasurementCalculateMixin` + +::: swarmauri_base.measurements.MeasurementCalculateMixin.MeasurementCalculateMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/measurements/MeasurementThresholdMixin.md b/docs/docs/src/swarmauri_base/measurements/MeasurementThresholdMixin.md new file mode 100644 index 000000000..73366c1e6 --- /dev/null +++ b/docs/docs/src/swarmauri_base/measurements/MeasurementThresholdMixin.md @@ 
-0,0 +1,6 @@ +# Class `swarmauri_base.measurements.MeasurementThresholdMixin.MeasurementThresholdMixin` + +::: swarmauri_base.measurements.MeasurementThresholdMixin.MeasurementThresholdMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/messages.md b/docs/docs/src/swarmauri_base/messages.md new file mode 100644 index 000000000..f9e3e2042 --- /dev/null +++ b/docs/docs/src/swarmauri_base/messages.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.messages` + +::: swarmauri_base.messages + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/messages/MessageBase.md b/docs/docs/src/swarmauri_base/messages/MessageBase.md new file mode 100644 index 000000000..27a830a75 --- /dev/null +++ b/docs/docs/src/swarmauri_base/messages/MessageBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.messages.MessageBase.MessageBase` + +::: swarmauri_base.messages.MessageBase.MessageBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/parsers.md b/docs/docs/src/swarmauri_base/parsers.md new file mode 100644 index 000000000..9fd8d92d3 --- /dev/null +++ b/docs/docs/src/swarmauri_base/parsers.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.parsers` + +::: swarmauri_base.parsers + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/parsers/ParserBase.md b/docs/docs/src/swarmauri_base/parsers/ParserBase.md new file mode 100644 index 000000000..4319a8fb8 --- /dev/null +++ b/docs/docs/src/swarmauri_base/parsers/ParserBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.parsers.ParserBase.ParserBase` + +::: swarmauri_base.parsers.ParserBase.ParserBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/pipelines.md 
b/docs/docs/src/swarmauri_base/pipelines.md new file mode 100644 index 000000000..26ed1ac82 --- /dev/null +++ b/docs/docs/src/swarmauri_base/pipelines.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.pipelines` + +::: swarmauri_base.pipelines + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/pipelines/PipelineBase.md b/docs/docs/src/swarmauri_base/pipelines/PipelineBase.md new file mode 100644 index 000000000..1febeccd1 --- /dev/null +++ b/docs/docs/src/swarmauri_base/pipelines/PipelineBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.pipelines.PipelineBase.PipelineBase` + +::: swarmauri_base.pipelines.PipelineBase.PipelineBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/prompt_templates.md b/docs/docs/src/swarmauri_base/prompt_templates.md new file mode 100644 index 000000000..f2e0e160d --- /dev/null +++ b/docs/docs/src/swarmauri_base/prompt_templates.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.prompt_templates` + +::: swarmauri_base.prompt_templates + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/prompt_templates/PromptTemplateBase.md b/docs/docs/src/swarmauri_base/prompt_templates/PromptTemplateBase.md new file mode 100644 index 000000000..7cd98555f --- /dev/null +++ b/docs/docs/src/swarmauri_base/prompt_templates/PromptTemplateBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.prompt_templates.PromptTemplateBase.PromptTemplateBase` + +::: swarmauri_base.prompt_templates.PromptTemplateBase.PromptTemplateBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/prompts.md b/docs/docs/src/swarmauri_base/prompts.md new file mode 100644 index 000000000..264e8aae0 --- /dev/null +++ b/docs/docs/src/swarmauri_base/prompts.md 
@@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.prompts` + +::: swarmauri_base.prompts + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/prompts/PromptBase.md b/docs/docs/src/swarmauri_base/prompts/PromptBase.md new file mode 100644 index 000000000..6279573ac --- /dev/null +++ b/docs/docs/src/swarmauri_base/prompts/PromptBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.prompts.PromptBase.PromptBase` + +::: swarmauri_base.prompts.PromptBase.PromptBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/prompts/PromptGeneratorBase.md b/docs/docs/src/swarmauri_base/prompts/PromptGeneratorBase.md new file mode 100644 index 000000000..9f95dd28d --- /dev/null +++ b/docs/docs/src/swarmauri_base/prompts/PromptGeneratorBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.prompts.PromptGeneratorBase.PromptGeneratorBase` + +::: swarmauri_base.prompts.PromptGeneratorBase.PromptGeneratorBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/prompts/PromptMatrixBase.md b/docs/docs/src/swarmauri_base/prompts/PromptMatrixBase.md new file mode 100644 index 000000000..39910754e --- /dev/null +++ b/docs/docs/src/swarmauri_base/prompts/PromptMatrixBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.prompts.PromptMatrixBase.PromptMatrixBase` + +::: swarmauri_base.prompts.PromptMatrixBase.PromptMatrixBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/schema_converters.md b/docs/docs/src/swarmauri_base/schema_converters.md new file mode 100644 index 000000000..9b54d41a4 --- /dev/null +++ b/docs/docs/src/swarmauri_base/schema_converters.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.schema_converters` + +::: swarmauri_base.schema_converters + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude 
everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/schema_converters/SchemaConverterBase.md b/docs/docs/src/swarmauri_base/schema_converters/SchemaConverterBase.md new file mode 100644 index 000000000..6d545401d --- /dev/null +++ b/docs/docs/src/swarmauri_base/schema_converters/SchemaConverterBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.schema_converters.SchemaConverterBase.SchemaConverterBase` + +::: swarmauri_base.schema_converters.SchemaConverterBase.SchemaConverterBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/service_registries.md b/docs/docs/src/swarmauri_base/service_registries.md new file mode 100644 index 000000000..2940b5879 --- /dev/null +++ b/docs/docs/src/swarmauri_base/service_registries.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.service_registries` + +::: swarmauri_base.service_registries + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/service_registries/ServiceRegistryBase.md b/docs/docs/src/swarmauri_base/service_registries/ServiceRegistryBase.md new file mode 100644 index 000000000..66546953a --- /dev/null +++ b/docs/docs/src/swarmauri_base/service_registries/ServiceRegistryBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.service_registries.ServiceRegistryBase.ServiceRegistryBase` + +::: swarmauri_base.service_registries.ServiceRegistryBase.ServiceRegistryBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/state.md b/docs/docs/src/swarmauri_base/state.md new file mode 100644 index 000000000..827c29b01 --- /dev/null +++ b/docs/docs/src/swarmauri_base/state.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.state` + +::: swarmauri_base.state + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git 
a/docs/docs/src/swarmauri_base/state/StateBase.md b/docs/docs/src/swarmauri_base/state/StateBase.md new file mode 100644 index 000000000..1cce6d5bd --- /dev/null +++ b/docs/docs/src/swarmauri_base/state/StateBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.state.StateBase.StateBase` + +::: swarmauri_base.state.StateBase.StateBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/swarms.md b/docs/docs/src/swarmauri_base/swarms.md new file mode 100644 index 000000000..4c347ca16 --- /dev/null +++ b/docs/docs/src/swarmauri_base/swarms.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.swarms` + +::: swarmauri_base.swarms + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/swarms/SwarmBase.md b/docs/docs/src/swarmauri_base/swarms/SwarmBase.md new file mode 100644 index 000000000..46b186d14 --- /dev/null +++ b/docs/docs/src/swarmauri_base/swarms/SwarmBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.swarms.SwarmBase.SwarmBase` + +::: swarmauri_base.swarms.SwarmBase.SwarmBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/swarms/SwarmStatus.md b/docs/docs/src/swarmauri_base/swarms/SwarmStatus.md new file mode 100644 index 000000000..1c3c46e17 --- /dev/null +++ b/docs/docs/src/swarmauri_base/swarms/SwarmStatus.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.swarms.SwarmBase.SwarmStatus` + +::: swarmauri_base.swarms.SwarmBase.SwarmStatus + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/task_mgmt_strategies.md b/docs/docs/src/swarmauri_base/task_mgmt_strategies.md new file mode 100644 index 000000000..5af034abe --- /dev/null +++ b/docs/docs/src/swarmauri_base/task_mgmt_strategies.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.task_mgmt_strategies` + +::: swarmauri_base.task_mgmt_strategies + options.extra: + show_submodules: false 
+ show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/task_mgmt_strategies/TaskMgmtStrategyBase.md b/docs/docs/src/swarmauri_base/task_mgmt_strategies/TaskMgmtStrategyBase.md new file mode 100644 index 000000000..c219f173a --- /dev/null +++ b/docs/docs/src/swarmauri_base/task_mgmt_strategies/TaskMgmtStrategyBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.task_mgmt_strategies.TaskMgmtStrategyBase.TaskMgmtStrategyBase` + +::: swarmauri_base.task_mgmt_strategies.TaskMgmtStrategyBase.TaskMgmtStrategyBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/toolkits.md b/docs/docs/src/swarmauri_base/toolkits.md new file mode 100644 index 000000000..ea3d1c23d --- /dev/null +++ b/docs/docs/src/swarmauri_base/toolkits.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.toolkits` + +::: swarmauri_base.toolkits + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/toolkits/ToolkitBase.md b/docs/docs/src/swarmauri_base/toolkits/ToolkitBase.md new file mode 100644 index 000000000..ff63fd876 --- /dev/null +++ b/docs/docs/src/swarmauri_base/toolkits/ToolkitBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.toolkits.ToolkitBase.ToolkitBase` + +::: swarmauri_base.toolkits.ToolkitBase.ToolkitBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/tools.md b/docs/docs/src/swarmauri_base/tools.md new file mode 100644 index 000000000..3a9324be7 --- /dev/null +++ b/docs/docs/src/swarmauri_base/tools.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.tools` + +::: swarmauri_base.tools + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/tools/ParameterBase.md 
b/docs/docs/src/swarmauri_base/tools/ParameterBase.md new file mode 100644 index 000000000..74a710ecb --- /dev/null +++ b/docs/docs/src/swarmauri_base/tools/ParameterBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.tools.ParameterBase.ParameterBase` + +::: swarmauri_base.tools.ParameterBase.ParameterBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/tools/ToolBase.md b/docs/docs/src/swarmauri_base/tools/ToolBase.md new file mode 100644 index 000000000..7855164e1 --- /dev/null +++ b/docs/docs/src/swarmauri_base/tools/ToolBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.tools.ToolBase.ToolBase` + +::: swarmauri_base.tools.ToolBase.ToolBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/transports.md b/docs/docs/src/swarmauri_base/transports.md new file mode 100644 index 000000000..516f47b40 --- /dev/null +++ b/docs/docs/src/swarmauri_base/transports.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.transports` + +::: swarmauri_base.transports + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/transports/TransportBase.md b/docs/docs/src/swarmauri_base/transports/TransportBase.md new file mode 100644 index 000000000..669d1e791 --- /dev/null +++ b/docs/docs/src/swarmauri_base/transports/TransportBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.transports.TransportBase.TransportBase` + +::: swarmauri_base.transports.TransportBase.TransportBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/transports/TransportProtocol.md b/docs/docs/src/swarmauri_base/transports/TransportProtocol.md new file mode 100644 index 000000000..f7236fbca --- /dev/null +++ b/docs/docs/src/swarmauri_base/transports/TransportProtocol.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.transports.TransportBase.TransportProtocol` + +::: 
swarmauri_base.transports.TransportBase.TransportProtocol + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/vector_stores.md b/docs/docs/src/swarmauri_base/vector_stores.md new file mode 100644 index 000000000..cabfe24a6 --- /dev/null +++ b/docs/docs/src/swarmauri_base/vector_stores.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.vector_stores` + +::: swarmauri_base.vector_stores + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/vector_stores/VectorStoreBase.md b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreBase.md new file mode 100644 index 000000000..e8f4c1cbd --- /dev/null +++ b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.vector_stores.VectorStoreBase.VectorStoreBase` + +::: swarmauri_base.vector_stores.VectorStoreBase.VectorStoreBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/vector_stores/VectorStoreCloudMixin.md b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreCloudMixin.md new file mode 100644 index 000000000..59c54155a --- /dev/null +++ b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreCloudMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.vector_stores.VectorStoreCloudMixin.VectorStoreCloudMixin` + +::: swarmauri_base.vector_stores.VectorStoreCloudMixin.VectorStoreCloudMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/vector_stores/VectorStorePersistentMixin.md b/docs/docs/src/swarmauri_base/vector_stores/VectorStorePersistentMixin.md new file mode 100644 index 000000000..79134b6b5 --- /dev/null +++ b/docs/docs/src/swarmauri_base/vector_stores/VectorStorePersistentMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.vector_stores.VectorStorePersistentMixin.VectorStorePersistentMixin` + +::: 
swarmauri_base.vector_stores.VectorStorePersistentMixin.VectorStorePersistentMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/vector_stores/VectorStoreRetrieveMixin.md b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreRetrieveMixin.md new file mode 100644 index 000000000..c510b6546 --- /dev/null +++ b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreRetrieveMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.vector_stores.VectorStoreRetrieveMixin.VectorStoreRetrieveMixin` + +::: swarmauri_base.vector_stores.VectorStoreRetrieveMixin.VectorStoreRetrieveMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/vector_stores/VectorStoreSaveLoadMixin.md b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreSaveLoadMixin.md new file mode 100644 index 000000000..e13092ddd --- /dev/null +++ b/docs/docs/src/swarmauri_base/vector_stores/VectorStoreSaveLoadMixin.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.vector_stores.VectorStoreSaveLoadMixin.VectorStoreSaveLoadMixin` + +::: swarmauri_base.vector_stores.VectorStoreSaveLoadMixin.VectorStoreSaveLoadMixin + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/vector_stores/VisionVectorStoreBase.md b/docs/docs/src/swarmauri_base/vector_stores/VisionVectorStoreBase.md new file mode 100644 index 000000000..03e7cf16c --- /dev/null +++ b/docs/docs/src/swarmauri_base/vector_stores/VisionVectorStoreBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.vector_stores.VisionVectorStoreBase.VisionVectorStoreBase` + +::: swarmauri_base.vector_stores.VisionVectorStoreBase.VisionVectorStoreBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_base/vectors.md b/docs/docs/src/swarmauri_base/vectors.md new file mode 100644 index 000000000..093500e35 --- /dev/null +++ b/docs/docs/src/swarmauri_base/vectors.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_base.vectors` + +::: swarmauri_base.vectors + 
options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_base/vectors/VectorBase.md b/docs/docs/src/swarmauri_base/vectors/VectorBase.md new file mode 100644 index 000000000..4e0b3f4c7 --- /dev/null +++ b/docs/docs/src/swarmauri_base/vectors/VectorBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_base.vectors.VectorBase.VectorBase` + +::: swarmauri_base.vectors.VectorBase.VectorBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/ComponentBase.md b/docs/docs/src/swarmauri_core/ComponentBase.md new file mode 100644 index 000000000..ed29f0eb3 --- /dev/null +++ b/docs/docs/src/swarmauri_core/ComponentBase.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.ComponentBase.ComponentBase` + +::: swarmauri_core.ComponentBase.ComponentBase + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/ResourceType.md b/docs/docs/src/swarmauri_core/ResourceType.md new file mode 100644 index 000000000..3d4d598a1 --- /dev/null +++ b/docs/docs/src/swarmauri_core/ResourceType.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.ComponentBase.ResourceType` + +::: swarmauri_core.ComponentBase.ResourceType + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/ResourceTypes.md b/docs/docs/src/swarmauri_core/ResourceTypes.md new file mode 100644 index 000000000..456008aea --- /dev/null +++ b/docs/docs/src/swarmauri_core/ResourceTypes.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.ComponentBase.ResourceTypes` + +::: swarmauri_core.ComponentBase.ResourceTypes + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/SubclassUnion.md b/docs/docs/src/swarmauri_core/SubclassUnion.md new file mode 100644 index 000000000..e72ff0977 --- /dev/null +++ b/docs/docs/src/swarmauri_core/SubclassUnion.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.ComponentBase.SubclassUnion` + +::: 
swarmauri_core.ComponentBase.SubclassUnion + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agent_apis.md b/docs/docs/src/swarmauri_core/agent_apis.md new file mode 100644 index 000000000..31aca01ea --- /dev/null +++ b/docs/docs/src/swarmauri_core/agent_apis.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.agent_apis` + +::: swarmauri_core.agent_apis + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/agent_apis/IAgentCommands.md b/docs/docs/src/swarmauri_core/agent_apis/IAgentCommands.md new file mode 100644 index 000000000..d11864e60 --- /dev/null +++ b/docs/docs/src/swarmauri_core/agent_apis/IAgentCommands.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agent_apis.IAgentCommands.IAgentCommands` + +::: swarmauri_core.agent_apis.IAgentCommands.IAgentCommands + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agent_apis/IAgentRouterCRUD.md b/docs/docs/src/swarmauri_core/agent_apis/IAgentRouterCRUD.md new file mode 100644 index 000000000..e77e7493a --- /dev/null +++ b/docs/docs/src/swarmauri_core/agent_apis/IAgentRouterCRUD.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agent_apis.IAgentRouterCRUD.IAgentRouterCRUD` + +::: swarmauri_core.agent_apis.IAgentRouterCRUD.IAgentRouterCRUD + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agent_factories.md b/docs/docs/src/swarmauri_core/agent_factories.md new file mode 100644 index 000000000..8f4e76343 --- /dev/null +++ b/docs/docs/src/swarmauri_core/agent_factories.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.agent_factories` + +::: swarmauri_core.agent_factories + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/agent_factories/IExportConf.md 
b/docs/docs/src/swarmauri_core/agent_factories/IExportConf.md new file mode 100644 index 000000000..91f3aea78 --- /dev/null +++ b/docs/docs/src/swarmauri_core/agent_factories/IExportConf.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agent_factories.IExportConf.IExportConf` + +::: swarmauri_core.agent_factories.IExportConf.IExportConf + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agents.md b/docs/docs/src/swarmauri_core/agents.md new file mode 100644 index 000000000..17e0e163e --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.agents` + +::: swarmauri_core.agents + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/agents/IAgent.md b/docs/docs/src/swarmauri_core/agents/IAgent.md new file mode 100644 index 000000000..d2e0865ea --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents/IAgent.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agents.IAgent.IAgent` + +::: swarmauri_core.agents.IAgent.IAgent + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agents/IAgentConversation.md b/docs/docs/src/swarmauri_core/agents/IAgentConversation.md new file mode 100644 index 000000000..dc7f3f8eb --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents/IAgentConversation.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agents.IAgentConversation.IAgentConversation` + +::: swarmauri_core.agents.IAgentConversation.IAgentConversation + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agents/IAgentParser.md b/docs/docs/src/swarmauri_core/agents/IAgentParser.md new file mode 100644 index 000000000..37cb2c8df --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents/IAgentParser.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agents.IAgentParser.IAgentParser` + +::: swarmauri_core.agents.IAgentParser.IAgentParser 
+ options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agents/IAgentRetrieve.md b/docs/docs/src/swarmauri_core/agents/IAgentRetrieve.md new file mode 100644 index 000000000..9aa935f72 --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents/IAgentRetrieve.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agents.IAgentRetrieve.IAgentRetrieve` + +::: swarmauri_core.agents.IAgentRetrieve.IAgentRetrieve + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agents/IAgentSystemContext.md b/docs/docs/src/swarmauri_core/agents/IAgentSystemContext.md new file mode 100644 index 000000000..f15c2f320 --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents/IAgentSystemContext.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agents.IAgentSystemContext.IAgentSystemContext` + +::: swarmauri_core.agents.IAgentSystemContext.IAgentSystemContext + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agents/IAgentToolkit.md b/docs/docs/src/swarmauri_core/agents/IAgentToolkit.md new file mode 100644 index 000000000..b078b9758 --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents/IAgentToolkit.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agents.IAgentToolkit.IAgentToolkit` + +::: swarmauri_core.agents.IAgentToolkit.IAgentToolkit + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/agents/IAgentVectorStore.md b/docs/docs/src/swarmauri_core/agents/IAgentVectorStore.md new file mode 100644 index 000000000..ef4022755 --- /dev/null +++ b/docs/docs/src/swarmauri_core/agents/IAgentVectorStore.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.agents.IAgentVectorStore.IAgentVectorStore` + +::: swarmauri_core.agents.IAgentVectorStore.IAgentVectorStore + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/chains.md b/docs/docs/src/swarmauri_core/chains.md new file mode 100644 index 000000000..211f1c77e --- /dev/null +++ 
b/docs/docs/src/swarmauri_core/chains.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.chains` + +::: swarmauri_core.chains + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/chains/ICallableChain.md b/docs/docs/src/swarmauri_core/chains/ICallableChain.md new file mode 100644 index 000000000..25538e079 --- /dev/null +++ b/docs/docs/src/swarmauri_core/chains/ICallableChain.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.chains.ICallableChain.ICallableChain` + +::: swarmauri_core.chains.ICallableChain.ICallableChain + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/chains/IChain.md b/docs/docs/src/swarmauri_core/chains/IChain.md new file mode 100644 index 000000000..d1a9e8a54 --- /dev/null +++ b/docs/docs/src/swarmauri_core/chains/IChain.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.chains.IChain.IChain` + +::: swarmauri_core.chains.IChain.IChain + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/chains/IChainContext.md b/docs/docs/src/swarmauri_core/chains/IChainContext.md new file mode 100644 index 000000000..b4841ed88 --- /dev/null +++ b/docs/docs/src/swarmauri_core/chains/IChainContext.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.chains.IChainContext.IChainContext` + +::: swarmauri_core.chains.IChainContext.IChainContext + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/chains/IChainContextLoader.md b/docs/docs/src/swarmauri_core/chains/IChainContextLoader.md new file mode 100644 index 000000000..21d752b6f --- /dev/null +++ b/docs/docs/src/swarmauri_core/chains/IChainContextLoader.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.chains.IChainContextLoader.IChainContextLoader` + +::: swarmauri_core.chains.IChainContextLoader.IChainContextLoader + options.extra: + show_inheritance: true + diff --git 
a/docs/docs/src/swarmauri_core/chains/IChainFactory.md b/docs/docs/src/swarmauri_core/chains/IChainFactory.md new file mode 100644 index 000000000..9c44f87d7 --- /dev/null +++ b/docs/docs/src/swarmauri_core/chains/IChainFactory.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.chains.IChainFactory.IChainFactory` + +::: swarmauri_core.chains.IChainFactory.IChainFactory + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/chains/IChainStep.md b/docs/docs/src/swarmauri_core/chains/IChainStep.md new file mode 100644 index 000000000..7dc91b3ec --- /dev/null +++ b/docs/docs/src/swarmauri_core/chains/IChainStep.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.chains.IChainStep.IChainStep` + +::: swarmauri_core.chains.IChainStep.IChainStep + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/chunkers.md b/docs/docs/src/swarmauri_core/chunkers.md new file mode 100644 index 000000000..ff83cfa2a --- /dev/null +++ b/docs/docs/src/swarmauri_core/chunkers.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.chunkers` + +::: swarmauri_core.chunkers + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/chunkers/IChunker.md b/docs/docs/src/swarmauri_core/chunkers/IChunker.md new file mode 100644 index 000000000..f59f27121 --- /dev/null +++ b/docs/docs/src/swarmauri_core/chunkers/IChunker.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.chunkers.IChunker.IChunker` + +::: swarmauri_core.chunkers.IChunker.IChunker + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/control_panels.md b/docs/docs/src/swarmauri_core/control_panels.md new file mode 100644 index 000000000..a23e640a8 --- /dev/null +++ b/docs/docs/src/swarmauri_core/control_panels.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.control_panels` + +::: swarmauri_core.control_panels + options.extra: + 
show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/control_panels/IControlPanel.md b/docs/docs/src/swarmauri_core/control_panels/IControlPanel.md new file mode 100644 index 000000000..5137c85aa --- /dev/null +++ b/docs/docs/src/swarmauri_core/control_panels/IControlPanel.md @@ -0,0 +1,13 @@ +# Documentation for `swarmauri_core.control_panels.IControlPanel` + +::: swarmauri_core.control_panels.IControlPanel + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + +## Classes + +- [`IControlPlane`](IControlPlane.md) + diff --git a/docs/docs/src/swarmauri_core/control_panels/IControlPlane.md b/docs/docs/src/swarmauri_core/control_panels/IControlPlane.md new file mode 100644 index 000000000..0ee89fd1f --- /dev/null +++ b/docs/docs/src/swarmauri_core/control_panels/IControlPlane.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.control_panels.IControlPanel.IControlPlane` + +::: swarmauri_core.control_panels.IControlPanel.IControlPlane + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/conversations.md b/docs/docs/src/swarmauri_core/conversations.md new file mode 100644 index 000000000..8f62787e6 --- /dev/null +++ b/docs/docs/src/swarmauri_core/conversations.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.conversations` + +::: swarmauri_core.conversations + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/conversations/IConversation.md b/docs/docs/src/swarmauri_core/conversations/IConversation.md new file mode 100644 index 000000000..907391031 --- /dev/null +++ b/docs/docs/src/swarmauri_core/conversations/IConversation.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.conversations.IConversation.IConversation` + +::: 
swarmauri_core.conversations.IConversation.IConversation + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/conversations/IMaxSize.md b/docs/docs/src/swarmauri_core/conversations/IMaxSize.md new file mode 100644 index 000000000..4723fa8c4 --- /dev/null +++ b/docs/docs/src/swarmauri_core/conversations/IMaxSize.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.conversations.IMaxSize.IMaxSize` + +::: swarmauri_core.conversations.IMaxSize.IMaxSize + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/conversations/ISystemContext.md b/docs/docs/src/swarmauri_core/conversations/ISystemContext.md new file mode 100644 index 000000000..d609aff19 --- /dev/null +++ b/docs/docs/src/swarmauri_core/conversations/ISystemContext.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.conversations.ISystemContext.ISystemContext` + +::: swarmauri_core.conversations.ISystemContext.ISystemContext + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/dataconnectors.md b/docs/docs/src/swarmauri_core/dataconnectors.md new file mode 100644 index 000000000..c50282883 --- /dev/null +++ b/docs/docs/src/swarmauri_core/dataconnectors.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.dataconnectors` + +::: swarmauri_core.dataconnectors + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/dataconnectors/IDataConnector.md b/docs/docs/src/swarmauri_core/dataconnectors/IDataConnector.md new file mode 100644 index 000000000..d3b9eb354 --- /dev/null +++ b/docs/docs/src/swarmauri_core/dataconnectors/IDataConnector.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.dataconnectors.IDataConnector.IDataConnector` + +::: swarmauri_core.dataconnectors.IDataConnector.IDataConnector + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/distances.md 
b/docs/docs/src/swarmauri_core/distances.md new file mode 100644 index 000000000..63e48628f --- /dev/null +++ b/docs/docs/src/swarmauri_core/distances.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.distances` + +::: swarmauri_core.distances + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/distances/IDistanceSimilarity.md b/docs/docs/src/swarmauri_core/distances/IDistanceSimilarity.md new file mode 100644 index 000000000..a3b7fe298 --- /dev/null +++ b/docs/docs/src/swarmauri_core/distances/IDistanceSimilarity.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.distances.IDistanceSimilarity.IDistanceSimilarity` + +::: swarmauri_core.distances.IDistanceSimilarity.IDistanceSimilarity + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/document_stores.md b/docs/docs/src/swarmauri_core/document_stores.md new file mode 100644 index 000000000..de4f03d99 --- /dev/null +++ b/docs/docs/src/swarmauri_core/document_stores.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.document_stores` + +::: swarmauri_core.document_stores + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/document_stores/IDocumentRetrieve.md b/docs/docs/src/swarmauri_core/document_stores/IDocumentRetrieve.md new file mode 100644 index 000000000..5d37e612b --- /dev/null +++ b/docs/docs/src/swarmauri_core/document_stores/IDocumentRetrieve.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.document_stores.IDocumentRetrieve.IDocumentRetrieve` + +::: swarmauri_core.document_stores.IDocumentRetrieve.IDocumentRetrieve + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/document_stores/IDocumentStore.md b/docs/docs/src/swarmauri_core/document_stores/IDocumentStore.md new file mode 100644 index 
000000000..49fdd2bb1 --- /dev/null +++ b/docs/docs/src/swarmauri_core/document_stores/IDocumentStore.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.document_stores.IDocumentStore.IDocumentStore` + +::: swarmauri_core.document_stores.IDocumentStore.IDocumentStore + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/documents.md b/docs/docs/src/swarmauri_core/documents.md new file mode 100644 index 000000000..3dd363696 --- /dev/null +++ b/docs/docs/src/swarmauri_core/documents.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.documents` + +::: swarmauri_core.documents + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/documents/IDocument.md b/docs/docs/src/swarmauri_core/documents/IDocument.md new file mode 100644 index 000000000..d7c6608e5 --- /dev/null +++ b/docs/docs/src/swarmauri_core/documents/IDocument.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.documents.IDocument.IDocument` + +::: swarmauri_core.documents.IDocument.IDocument + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/documents/IExperimentDocument.md b/docs/docs/src/swarmauri_core/documents/IExperimentDocument.md new file mode 100644 index 000000000..d63cb6e84 --- /dev/null +++ b/docs/docs/src/swarmauri_core/documents/IExperimentDocument.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.documents.IExperimentDocument.IExperimentDocument` + +::: swarmauri_core.documents.IExperimentDocument.IExperimentDocument + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/embeddings.md b/docs/docs/src/swarmauri_core/embeddings.md new file mode 100644 index 000000000..d8fdfd194 --- /dev/null +++ b/docs/docs/src/swarmauri_core/embeddings.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.embeddings` + +::: swarmauri_core.embeddings + options.extra: + show_submodules: false + 
show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/embeddings/IFeature.md b/docs/docs/src/swarmauri_core/embeddings/IFeature.md new file mode 100644 index 000000000..42309800f --- /dev/null +++ b/docs/docs/src/swarmauri_core/embeddings/IFeature.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.embeddings.IFeature.IFeature` + +::: swarmauri_core.embeddings.IFeature.IFeature + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/embeddings/ISaveModel.md b/docs/docs/src/swarmauri_core/embeddings/ISaveModel.md new file mode 100644 index 000000000..5fd007e45 --- /dev/null +++ b/docs/docs/src/swarmauri_core/embeddings/ISaveModel.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.embeddings.ISaveModel.ISaveModel` + +::: swarmauri_core.embeddings.ISaveModel.ISaveModel + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/embeddings/IVectorize.md b/docs/docs/src/swarmauri_core/embeddings/IVectorize.md new file mode 100644 index 000000000..8388f9204 --- /dev/null +++ b/docs/docs/src/swarmauri_core/embeddings/IVectorize.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.embeddings.IVectorize.IVectorize` + +::: swarmauri_core.embeddings.IVectorize.IVectorize + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/experiment_stores.md b/docs/docs/src/swarmauri_core/experiment_stores.md new file mode 100644 index 000000000..22e7ad627 --- /dev/null +++ b/docs/docs/src/swarmauri_core/experiment_stores.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.experiment_stores` + +::: swarmauri_core.experiment_stores + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/experiment_stores/IExperimentStore.md b/docs/docs/src/swarmauri_core/experiment_stores/IExperimentStore.md new file mode 100644 index 
000000000..d585640f9 --- /dev/null +++ b/docs/docs/src/swarmauri_core/experiment_stores/IExperimentStore.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.experiment_stores.IExperimentStore.IExperimentStore` + +::: swarmauri_core.experiment_stores.IExperimentStore.IExperimentStore + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/factories.md b/docs/docs/src/swarmauri_core/factories.md new file mode 100644 index 000000000..c39ac9eff --- /dev/null +++ b/docs/docs/src/swarmauri_core/factories.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.factories` + +::: swarmauri_core.factories + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/factories/IFactory.md b/docs/docs/src/swarmauri_core/factories/IFactory.md new file mode 100644 index 000000000..34b3e1e98 --- /dev/null +++ b/docs/docs/src/swarmauri_core/factories/IFactory.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.factories.IFactory.IFactory` + +::: swarmauri_core.factories.IFactory.IFactory + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/image_gens.md b/docs/docs/src/swarmauri_core/image_gens.md new file mode 100644 index 000000000..c932ef805 --- /dev/null +++ b/docs/docs/src/swarmauri_core/image_gens.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.image_gens` + +::: swarmauri_core.image_gens + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/image_gens/IGenImage.md b/docs/docs/src/swarmauri_core/image_gens/IGenImage.md new file mode 100644 index 000000000..e6c27e05d --- /dev/null +++ b/docs/docs/src/swarmauri_core/image_gens/IGenImage.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.image_gens.IGenImage.IGenImage` + +::: swarmauri_core.image_gens.IGenImage.IGenImage + options.extra: + 
show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/index.md b/docs/docs/src/swarmauri_core/index.md new file mode 100644 index 000000000..3bcaeca88 --- /dev/null +++ b/docs/docs/src/swarmauri_core/index.md @@ -0,0 +1,3 @@ +# Welcome + +This is the home page. diff --git a/docs/docs/src/swarmauri_core/llms.md b/docs/docs/src/swarmauri_core/llms.md new file mode 100644 index 000000000..b052edbea --- /dev/null +++ b/docs/docs/src/swarmauri_core/llms.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.llms` + +::: swarmauri_core.llms + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/llms/IFit.md b/docs/docs/src/swarmauri_core/llms/IFit.md new file mode 100644 index 000000000..1ae491868 --- /dev/null +++ b/docs/docs/src/swarmauri_core/llms/IFit.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.llms.IFit.IFit` + +::: swarmauri_core.llms.IFit.IFit + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/llms/IPredict.md b/docs/docs/src/swarmauri_core/llms/IPredict.md new file mode 100644 index 000000000..065dd7dd8 --- /dev/null +++ b/docs/docs/src/swarmauri_core/llms/IPredict.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.llms.IPredict.IPredict` + +::: swarmauri_core.llms.IPredict.IPredict + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/measurements.md b/docs/docs/src/swarmauri_core/measurements.md new file mode 100644 index 000000000..da393d4df --- /dev/null +++ b/docs/docs/src/swarmauri_core/measurements.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.measurements` + +::: swarmauri_core.measurements + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/measurements/IMeasurement.md 
b/docs/docs/src/swarmauri_core/measurements/IMeasurement.md new file mode 100644 index 000000000..3f4c5684c --- /dev/null +++ b/docs/docs/src/swarmauri_core/measurements/IMeasurement.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.measurements.IMeasurement.IMeasurement` + +::: swarmauri_core.measurements.IMeasurement.IMeasurement + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/measurements/IMeasurementAggregate.md b/docs/docs/src/swarmauri_core/measurements/IMeasurementAggregate.md new file mode 100644 index 000000000..6e231fa21 --- /dev/null +++ b/docs/docs/src/swarmauri_core/measurements/IMeasurementAggregate.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.measurements.IMeasurementAggregate.IMeasurementAggregate` + +::: swarmauri_core.measurements.IMeasurementAggregate.IMeasurementAggregate + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/measurements/IMeasurementCalculate.md b/docs/docs/src/swarmauri_core/measurements/IMeasurementCalculate.md new file mode 100644 index 000000000..7243baeef --- /dev/null +++ b/docs/docs/src/swarmauri_core/measurements/IMeasurementCalculate.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.measurements.IMeasurementCalculate.IMeasurementCalculate` + +::: swarmauri_core.measurements.IMeasurementCalculate.IMeasurementCalculate + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/measurements/IThreshold.md b/docs/docs/src/swarmauri_core/measurements/IThreshold.md new file mode 100644 index 000000000..8ad276e5f --- /dev/null +++ b/docs/docs/src/swarmauri_core/measurements/IThreshold.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.measurements.IThreshold.IThreshold` + +::: swarmauri_core.measurements.IThreshold.IThreshold + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/messages.md b/docs/docs/src/swarmauri_core/messages.md new file mode 100644 index 000000000..d4e80a674 --- /dev/null +++ 
b/docs/docs/src/swarmauri_core/messages.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.messages` + +::: swarmauri_core.messages + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/messages/IMessage.md b/docs/docs/src/swarmauri_core/messages/IMessage.md new file mode 100644 index 000000000..f9ab8b3a5 --- /dev/null +++ b/docs/docs/src/swarmauri_core/messages/IMessage.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.messages.IMessage.IMessage` + +::: swarmauri_core.messages.IMessage.IMessage + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/parsers.md b/docs/docs/src/swarmauri_core/parsers.md new file mode 100644 index 000000000..986afccdc --- /dev/null +++ b/docs/docs/src/swarmauri_core/parsers.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.parsers` + +::: swarmauri_core.parsers + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/parsers/IParser.md b/docs/docs/src/swarmauri_core/parsers/IParser.md new file mode 100644 index 000000000..4a03f1398 --- /dev/null +++ b/docs/docs/src/swarmauri_core/parsers/IParser.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.parsers.IParser.IParser` + +::: swarmauri_core.parsers.IParser.IParser + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/pipelines.md b/docs/docs/src/swarmauri_core/pipelines.md new file mode 100644 index 000000000..16cb54ddc --- /dev/null +++ b/docs/docs/src/swarmauri_core/pipelines.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.pipelines` + +::: swarmauri_core.pipelines + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/pipelines/IPipeline.md 
b/docs/docs/src/swarmauri_core/pipelines/IPipeline.md new file mode 100644 index 000000000..5e0acbb25 --- /dev/null +++ b/docs/docs/src/swarmauri_core/pipelines/IPipeline.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.pipelines.IPipeline.IPipeline` + +::: swarmauri_core.pipelines.IPipeline.IPipeline + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/pipelines/PipelineStatus.md b/docs/docs/src/swarmauri_core/pipelines/PipelineStatus.md new file mode 100644 index 000000000..35cb426a9 --- /dev/null +++ b/docs/docs/src/swarmauri_core/pipelines/PipelineStatus.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.pipelines.IPipeline.PipelineStatus` + +::: swarmauri_core.pipelines.IPipeline.PipelineStatus + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/prompt_templates.md b/docs/docs/src/swarmauri_core/prompt_templates.md new file mode 100644 index 000000000..9b31c6769 --- /dev/null +++ b/docs/docs/src/swarmauri_core/prompt_templates.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.prompt_templates` + +::: swarmauri_core.prompt_templates + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/prompt_templates/IPromptTemplate.md b/docs/docs/src/swarmauri_core/prompt_templates/IPromptTemplate.md new file mode 100644 index 000000000..a31db8a94 --- /dev/null +++ b/docs/docs/src/swarmauri_core/prompt_templates/IPromptTemplate.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.prompt_templates.IPromptTemplate.IPromptTemplate` + +::: swarmauri_core.prompt_templates.IPromptTemplate.IPromptTemplate + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/prompts.md b/docs/docs/src/swarmauri_core/prompts.md new file mode 100644 index 000000000..48e722733 --- /dev/null +++ b/docs/docs/src/swarmauri_core/prompts.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.prompts` 
+ +::: swarmauri_core.prompts + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/prompts/IPrompt.md b/docs/docs/src/swarmauri_core/prompts/IPrompt.md new file mode 100644 index 000000000..785f6d2ef --- /dev/null +++ b/docs/docs/src/swarmauri_core/prompts/IPrompt.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.prompts.IPrompt.IPrompt` + +::: swarmauri_core.prompts.IPrompt.IPrompt + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/prompts/IPromptMatrix.md b/docs/docs/src/swarmauri_core/prompts/IPromptMatrix.md new file mode 100644 index 000000000..f964560ef --- /dev/null +++ b/docs/docs/src/swarmauri_core/prompts/IPromptMatrix.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.prompts.IPromptMatrix.IPromptMatrix` + +::: swarmauri_core.prompts.IPromptMatrix.IPromptMatrix + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/prompts/ITemplate.md b/docs/docs/src/swarmauri_core/prompts/ITemplate.md new file mode 100644 index 000000000..ee8a0dad5 --- /dev/null +++ b/docs/docs/src/swarmauri_core/prompts/ITemplate.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.prompts.ITemplate.ITemplate` + +::: swarmauri_core.prompts.ITemplate.ITemplate + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/schema_converters.md b/docs/docs/src/swarmauri_core/schema_converters.md new file mode 100644 index 000000000..6ccade60a --- /dev/null +++ b/docs/docs/src/swarmauri_core/schema_converters.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.schema_converters` + +::: swarmauri_core.schema_converters + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/schema_converters/ISchemaConvert.md b/docs/docs/src/swarmauri_core/schema_converters/ISchemaConvert.md 
new file mode 100644 index 000000000..26ac67ced --- /dev/null +++ b/docs/docs/src/swarmauri_core/schema_converters/ISchemaConvert.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.schema_converters.ISchemaConvert.ISchemaConvert` + +::: swarmauri_core.schema_converters.ISchemaConvert.ISchemaConvert + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/service_registries.md b/docs/docs/src/swarmauri_core/service_registries.md new file mode 100644 index 000000000..f86600936 --- /dev/null +++ b/docs/docs/src/swarmauri_core/service_registries.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.service_registries` + +::: swarmauri_core.service_registries + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/service_registries/IServiceRegistry.md b/docs/docs/src/swarmauri_core/service_registries/IServiceRegistry.md new file mode 100644 index 000000000..78557ff4e --- /dev/null +++ b/docs/docs/src/swarmauri_core/service_registries/IServiceRegistry.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.service_registries.IServiceRegistry.IServiceRegistry` + +::: swarmauri_core.service_registries.IServiceRegistry.IServiceRegistry + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/swarm_apis.md b/docs/docs/src/swarmauri_core/swarm_apis.md new file mode 100644 index 000000000..077436e46 --- /dev/null +++ b/docs/docs/src/swarmauri_core/swarm_apis.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.swarm_apis` + +::: swarmauri_core.swarm_apis + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/swarm_apis/IAgentRegistrationAPI.md b/docs/docs/src/swarmauri_core/swarm_apis/IAgentRegistrationAPI.md new file mode 100644 index 000000000..772600d30 --- /dev/null +++ 
b/docs/docs/src/swarmauri_core/swarm_apis/IAgentRegistrationAPI.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.swarm_apis.IAgentRegistrationAPI.IAgentRegistrationAPI` + +::: swarmauri_core.swarm_apis.IAgentRegistrationAPI.IAgentRegistrationAPI + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/swarm_apis/ISwarmAPI.md b/docs/docs/src/swarmauri_core/swarm_apis/ISwarmAPI.md new file mode 100644 index 000000000..a0216f404 --- /dev/null +++ b/docs/docs/src/swarmauri_core/swarm_apis/ISwarmAPI.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.swarm_apis.ISwarmAPI.ISwarmAPI` + +::: swarmauri_core.swarm_apis.ISwarmAPI.ISwarmAPI + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/swarms.md b/docs/docs/src/swarmauri_core/swarms.md new file mode 100644 index 000000000..10d0b06a9 --- /dev/null +++ b/docs/docs/src/swarmauri_core/swarms.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.swarms` + +::: swarmauri_core.swarms + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/swarms/ISwarm.md b/docs/docs/src/swarmauri_core/swarms/ISwarm.md new file mode 100644 index 000000000..fbab750e9 --- /dev/null +++ b/docs/docs/src/swarmauri_core/swarms/ISwarm.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.swarms.ISwarm.ISwarm` + +::: swarmauri_core.swarms.ISwarm.ISwarm + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/task_mgmt_strategies.md b/docs/docs/src/swarmauri_core/task_mgmt_strategies.md new file mode 100644 index 000000000..b5db9f8fc --- /dev/null +++ b/docs/docs/src/swarmauri_core/task_mgmt_strategies.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.task_mgmt_strategies` + +::: swarmauri_core.task_mgmt_strategies + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff 
--git a/docs/docs/src/swarmauri_core/task_mgmt_strategies/ITaskMgmtStrategy.md b/docs/docs/src/swarmauri_core/task_mgmt_strategies/ITaskMgmtStrategy.md new file mode 100644 index 000000000..5ac174854 --- /dev/null +++ b/docs/docs/src/swarmauri_core/task_mgmt_strategies/ITaskMgmtStrategy.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.task_mgmt_strategies.ITaskMgmtStrategy.ITaskMgmtStrategy` + +::: swarmauri_core.task_mgmt_strategies.ITaskMgmtStrategy.ITaskMgmtStrategy + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/toolkits.md b/docs/docs/src/swarmauri_core/toolkits.md new file mode 100644 index 000000000..dbffde97e --- /dev/null +++ b/docs/docs/src/swarmauri_core/toolkits.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.toolkits` + +::: swarmauri_core.toolkits + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/toolkits/IToolkit.md b/docs/docs/src/swarmauri_core/toolkits/IToolkit.md new file mode 100644 index 000000000..2744d02bb --- /dev/null +++ b/docs/docs/src/swarmauri_core/toolkits/IToolkit.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.toolkits.IToolkit.IToolkit` + +::: swarmauri_core.toolkits.IToolkit.IToolkit + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/tools.md b/docs/docs/src/swarmauri_core/tools.md new file mode 100644 index 000000000..781ef930a --- /dev/null +++ b/docs/docs/src/swarmauri_core/tools.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.tools` + +::: swarmauri_core.tools + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/tools/IParameter.md b/docs/docs/src/swarmauri_core/tools/IParameter.md new file mode 100644 index 000000000..07eb8bd58 --- /dev/null +++ b/docs/docs/src/swarmauri_core/tools/IParameter.md @@ 
-0,0 +1,6 @@ +# Class `swarmauri_core.tools.IParameter.IParameter` + +::: swarmauri_core.tools.IParameter.IParameter + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/tools/ITool.md b/docs/docs/src/swarmauri_core/tools/ITool.md new file mode 100644 index 000000000..134428c99 --- /dev/null +++ b/docs/docs/src/swarmauri_core/tools/ITool.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.tools.ITool.ITool` + +::: swarmauri_core.tools.ITool.ITool + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/tracing.md b/docs/docs/src/swarmauri_core/tracing.md new file mode 100644 index 000000000..264075423 --- /dev/null +++ b/docs/docs/src/swarmauri_core/tracing.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.tracing` + +::: swarmauri_core.tracing + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/tracing/IChainTracer.md b/docs/docs/src/swarmauri_core/tracing/IChainTracer.md new file mode 100644 index 000000000..9db919710 --- /dev/null +++ b/docs/docs/src/swarmauri_core/tracing/IChainTracer.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.tracing.IChainTracer.IChainTracer` + +::: swarmauri_core.tracing.IChainTracer.IChainTracer + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/tracing/ITraceContext.md b/docs/docs/src/swarmauri_core/tracing/ITraceContext.md new file mode 100644 index 000000000..e718606b1 --- /dev/null +++ b/docs/docs/src/swarmauri_core/tracing/ITraceContext.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.tracing.ITraceContext.ITraceContext` + +::: swarmauri_core.tracing.ITraceContext.ITraceContext + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/tracing/ITracer.md b/docs/docs/src/swarmauri_core/tracing/ITracer.md new file mode 100644 index 000000000..54dc84249 --- /dev/null +++ 
b/docs/docs/src/swarmauri_core/tracing/ITracer.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.tracing.ITracer.ITracer` + +::: swarmauri_core.tracing.ITracer.ITracer + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/transports.md b/docs/docs/src/swarmauri_core/transports.md new file mode 100644 index 000000000..3ecd44e38 --- /dev/null +++ b/docs/docs/src/swarmauri_core/transports.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.transports` + +::: swarmauri_core.transports + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/transports/ITransport.md b/docs/docs/src/swarmauri_core/transports/ITransport.md new file mode 100644 index 000000000..5d1fda51a --- /dev/null +++ b/docs/docs/src/swarmauri_core/transports/ITransport.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.transports.ITransport.ITransport` + +::: swarmauri_core.transports.ITransport.ITransport + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/utils.md b/docs/docs/src/swarmauri_core/utils.md new file mode 100644 index 000000000..33fd691af --- /dev/null +++ b/docs/docs/src/swarmauri_core/utils.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.utils` + +::: swarmauri_core.utils + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/utils/ITransactional.md b/docs/docs/src/swarmauri_core/utils/ITransactional.md new file mode 100644 index 000000000..57fdab022 --- /dev/null +++ b/docs/docs/src/swarmauri_core/utils/ITransactional.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.utils.ITransactional.ITransactional` + +::: swarmauri_core.utils.ITransactional.ITransactional + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vcms.md 
b/docs/docs/src/swarmauri_core/vcms.md new file mode 100644 index 000000000..4ae1d5269 --- /dev/null +++ b/docs/docs/src/swarmauri_core/vcms.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.vcms` + +::: swarmauri_core.vcms + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/vcms/IPredictVision.md b/docs/docs/src/swarmauri_core/vcms/IPredictVision.md new file mode 100644 index 000000000..7598ac0ae --- /dev/null +++ b/docs/docs/src/swarmauri_core/vcms/IPredictVision.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vcms.IPredictVision.IPredictVision` + +::: swarmauri_core.vcms.IPredictVision.IPredictVision + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vector_stores.md b/docs/docs/src/swarmauri_core/vector_stores.md new file mode 100644 index 000000000..160e102b4 --- /dev/null +++ b/docs/docs/src/swarmauri_core/vector_stores.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.vector_stores` + +::: swarmauri_core.vector_stores + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/vector_stores/ICloudVectorStore.md b/docs/docs/src/swarmauri_core/vector_stores/ICloudVectorStore.md new file mode 100644 index 000000000..9755e26dd --- /dev/null +++ b/docs/docs/src/swarmauri_core/vector_stores/ICloudVectorStore.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vector_stores.ICloudVectorStore.ICloudVectorStore` + +::: swarmauri_core.vector_stores.ICloudVectorStore.ICloudVectorStore + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vector_stores/IPersistentVectorStore.md b/docs/docs/src/swarmauri_core/vector_stores/IPersistentVectorStore.md new file mode 100644 index 000000000..57943198a --- /dev/null +++ 
b/docs/docs/src/swarmauri_core/vector_stores/IPersistentVectorStore.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vector_stores.IPersistentVectorStore.IPersistentVectorStore` + +::: swarmauri_core.vector_stores.IPersistentVectorStore.IPersistentVectorStore + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vector_stores/ISimilarity.md b/docs/docs/src/swarmauri_core/vector_stores/ISimilarity.md new file mode 100644 index 000000000..b92849c5b --- /dev/null +++ b/docs/docs/src/swarmauri_core/vector_stores/ISimilarity.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vector_stores.ISimilarity.ISimilarity` + +::: swarmauri_core.vector_stores.ISimilarity.ISimilarity + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vector_stores/IVectorNorm.md b/docs/docs/src/swarmauri_core/vector_stores/IVectorNorm.md new file mode 100644 index 000000000..986748724 --- /dev/null +++ b/docs/docs/src/swarmauri_core/vector_stores/IVectorNorm.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vector_stores.IVectorNorm.IVectorNorm` + +::: swarmauri_core.vector_stores.IVectorNorm.IVectorNorm + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vector_stores/IVectorStore.md b/docs/docs/src/swarmauri_core/vector_stores/IVectorStore.md new file mode 100644 index 000000000..c8abde97e --- /dev/null +++ b/docs/docs/src/swarmauri_core/vector_stores/IVectorStore.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vector_stores.IVectorStore.IVectorStore` + +::: swarmauri_core.vector_stores.IVectorStore.IVectorStore + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vector_stores/IVectorStoreRetrieve.md b/docs/docs/src/swarmauri_core/vector_stores/IVectorStoreRetrieve.md new file mode 100644 index 000000000..0586862c2 --- /dev/null +++ b/docs/docs/src/swarmauri_core/vector_stores/IVectorStoreRetrieve.md @@ -0,0 +1,6 @@ +# Class 
`swarmauri_core.vector_stores.IVectorStoreRetrieve.IVectorStoreRetrieve` + +::: swarmauri_core.vector_stores.IVectorStoreRetrieve.IVectorStoreRetrieve + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vector_stores/IVectorStoreSaveLoad.md b/docs/docs/src/swarmauri_core/vector_stores/IVectorStoreSaveLoad.md new file mode 100644 index 000000000..dd8efbd3e --- /dev/null +++ b/docs/docs/src/swarmauri_core/vector_stores/IVectorStoreSaveLoad.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vector_stores.IVectorStoreSaveLoad.IVectorStoreSaveLoad` + +::: swarmauri_core.vector_stores.IVectorStoreSaveLoad.IVectorStoreSaveLoad + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vectors.md b/docs/docs/src/swarmauri_core/vectors.md new file mode 100644 index 000000000..5d3b4beed --- /dev/null +++ b/docs/docs/src/swarmauri_core/vectors.md @@ -0,0 +1,9 @@ +# Documentation for `swarmauri_core.vectors` + +::: swarmauri_core.vectors + options.extra: + show_submodules: false + show_inheritance: false + filters: + - '!.*' # exclude everything but the module docstring + diff --git a/docs/docs/src/swarmauri_core/vectors/IVector.md b/docs/docs/src/swarmauri_core/vectors/IVector.md new file mode 100644 index 000000000..a1cb9fb17 --- /dev/null +++ b/docs/docs/src/swarmauri_core/vectors/IVector.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vectors.IVector.IVector` + +::: swarmauri_core.vectors.IVector.IVector + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vectors/IVectorMeta.md b/docs/docs/src/swarmauri_core/vectors/IVectorMeta.md new file mode 100644 index 000000000..d70d8a590 --- /dev/null +++ b/docs/docs/src/swarmauri_core/vectors/IVectorMeta.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vectors.IVectorMeta.IVectorMeta` + +::: swarmauri_core.vectors.IVectorMeta.IVectorMeta + options.extra: + show_inheritance: true + diff --git a/docs/docs/src/swarmauri_core/vectors/IVectorProduct.md 
b/docs/docs/src/swarmauri_core/vectors/IVectorProduct.md new file mode 100644 index 000000000..7bbb79f52 --- /dev/null +++ b/docs/docs/src/swarmauri_core/vectors/IVectorProduct.md @@ -0,0 +1,6 @@ +# Class `swarmauri_core.vectors.IVectorProduct.IVectorProduct` + +::: swarmauri_core.vectors.IVectorProduct.IVectorProduct + options.extra: + show_inheritance: true + diff --git a/docs/docs/standalone/index.md b/docs/docs/standalone/index.md new file mode 100644 index 000000000..f9120380a --- /dev/null +++ b/docs/docs/standalone/index.md @@ -0,0 +1 @@ +# Standalone \ No newline at end of file diff --git a/docs/mdocks.insiders.yaml b/docs/mdocks.insiders.yaml new file mode 100644 index 000000000..1e5c55736 --- /dev/null +++ b/docs/mdocks.insiders.yaml @@ -0,0 +1,10 @@ +plugins: + social: + cards_layout_options: + logo: ../en/docs/img/icon-white.svg + typeset: +markdown_extensions: + material.extensions.preview: + targets: + include: + - "*" \ No newline at end of file diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml new file mode 100644 index 000000000..6d55421d1 --- /dev/null +++ b/docs/mkdocs.yml @@ -0,0 +1,228 @@ +site_name: Swarmauri SDK +repo_url: https://github.com/swarmauri/swarmauri-sdk +repo_name: swarmauri/swarmauri-sdk + +theme: + name: material + features: + - announce.dismiss + - content.action.edit + - content.code.annotate + - content.code.copy + - content.footnote.tooltips + - content.tabs.link + - content.tooltips + - navigation.footer + - navigation.indexes + - navigation.instant + - navigation.instant.prefetch + - navigation.instant.progress + - navigation.path + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - navigation.tracking + - search.highlight + - search.share + - search.suggest + - toc.follow + icon: + repo: fontawesome/brands/github + +plugins: +- mkdocstrings: + handlers: + python: + options: + extensions: + - griffe_typingdoc + show_root_heading: true + show_if_no_docstring: true + inherited_members: true + members_order: 
source + separate_signature: true + unwrap_annotated: true + filters: + - '!^_' + merge_init_into_class: true + docstring_section_style: spacy + signature_crossrefs: true + show_symbol_type_heading: true + show_symbol_type_toc: true + show_source: true +- blog: + blog_dir: news +- tags: + enabled: true +- search: null +- autorefs: null + +markdown_extensions: + - admonition + - pymdownx.details + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - abbr + - attr_list + - pymdownx.snippets + - pymdownx.tasklist: + custom_checkbox: true +extra: + alternate: + - name: English + link: /en/ + lang: en + +nav: +- Home: index.md +- Announcements: + - news/index.md +- Guide: + - Installation: guide/installation.md + - Usage: guide/usage.md +- Foundation: + - foundation/index.md + - Core: api/module1.md + - Base: api/module2.md + - Standard: api/module2.md + - Swarmauri: api/module2.md +- Standalone: + - standalone/index.md + - First Class: api/module2.md + - Second Class: api/module2.md + - Third Class: api/module2.md +- core: + - Home: src/swarmauri_core/index.md + - ComponentBase: src/swarmauri_core/ComponentBase.md + - ResourceType: src/swarmauri_core/ResourceType.md + - ResourceTypes: src/swarmauri_core/ResourceTypes.md + - SubclassUnion: src/swarmauri_core/SubclassUnion.md + - IAgentCommands: src/swarmauri_core/agent_apis/IAgentCommands.md + - IAgentRouterCRUD: src/swarmauri_core/agent_apis/IAgentRouterCRUD.md + - IExportConf: src/swarmauri_core/agent_factories/IExportConf.md + - IAgent: src/swarmauri_core/agents/IAgent.md + - IAgentConversation: src/swarmauri_core/agents/IAgentConversation.md + - IAgentParser: src/swarmauri_core/agents/IAgentParser.md + - IAgentRetrieve: src/swarmauri_core/agents/IAgentRetrieve.md + - IAgentSystemContext: src/swarmauri_core/agents/IAgentSystemContext.md + - IAgentToolkit: src/swarmauri_core/agents/IAgentToolkit.md + - IAgentVectorStore: 
src/swarmauri_core/agents/IAgentVectorStore.md + - ICallableChain: src/swarmauri_core/chains/ICallableChain.md + - IChain: src/swarmauri_core/chains/IChain.md + - IChainContext: src/swarmauri_core/chains/IChainContext.md + - IChainContextLoader: src/swarmauri_core/chains/IChainContextLoader.md + - IChainFactory: src/swarmauri_core/chains/IChainFactory.md + - IChainStep: src/swarmauri_core/chains/IChainStep.md + - IChunker: src/swarmauri_core/chunkers/IChunker.md + - IControlPlane: src/swarmauri_core/control_panels/IControlPlane.md + - IConversation: src/swarmauri_core/conversations/IConversation.md + - IMaxSize: src/swarmauri_core/conversations/IMaxSize.md + - ISystemContext: src/swarmauri_core/conversations/ISystemContext.md + - IDataConnector: src/swarmauri_core/dataconnectors/IDataConnector.md + - IDistanceSimilarity: src/swarmauri_core/distances/IDistanceSimilarity.md + - IDocumentRetrieve: src/swarmauri_core/document_stores/IDocumentRetrieve.md + - IDocumentStore: src/swarmauri_core/document_stores/IDocumentStore.md + - IDocument: src/swarmauri_core/documents/IDocument.md + - IExperimentDocument: src/swarmauri_core/documents/IExperimentDocument.md + - IFeature: src/swarmauri_core/embeddings/IFeature.md + - ISaveModel: src/swarmauri_core/embeddings/ISaveModel.md + - IVectorize: src/swarmauri_core/embeddings/IVectorize.md + - IExperimentStore: src/swarmauri_core/experiment_stores/IExperimentStore.md + - IFactory: src/swarmauri_core/factories/IFactory.md + - IGenImage: src/swarmauri_core/image_gens/IGenImage.md + - IFit: src/swarmauri_core/llms/IFit.md + - IPredict: src/swarmauri_core/llms/IPredict.md + - IMeasurement: src/swarmauri_core/measurements/IMeasurement.md + - IMeasurementAggregate: src/swarmauri_core/measurements/IMeasurementAggregate.md + - IMeasurementCalculate: src/swarmauri_core/measurements/IMeasurementCalculate.md + - IThreshold: src/swarmauri_core/measurements/IThreshold.md + - IMessage: src/swarmauri_core/messages/IMessage.md + - IParser: 
src/swarmauri_core/parsers/IParser.md + - IPipeline: src/swarmauri_core/pipelines/IPipeline.md + - PipelineStatus: src/swarmauri_core/pipelines/PipelineStatus.md + - IPromptTemplate: src/swarmauri_core/prompt_templates/IPromptTemplate.md + - IPrompt: src/swarmauri_core/prompts/IPrompt.md + - IPromptMatrix: src/swarmauri_core/prompts/IPromptMatrix.md + - ITemplate: src/swarmauri_core/prompts/ITemplate.md + - ISchemaConvert: src/swarmauri_core/schema_converters/ISchemaConvert.md + - IServiceRegistry: src/swarmauri_core/service_registries/IServiceRegistry.md + - IAgentRegistrationAPI: src/swarmauri_core/swarm_apis/IAgentRegistrationAPI.md + - ISwarmAPI: src/swarmauri_core/swarm_apis/ISwarmAPI.md + - ISwarm: src/swarmauri_core/swarms/ISwarm.md + - ITaskMgmtStrategy: src/swarmauri_core/task_mgmt_strategies/ITaskMgmtStrategy.md + - IToolkit: src/swarmauri_core/toolkits/IToolkit.md + - IParameter: src/swarmauri_core/tools/IParameter.md + - ITool: src/swarmauri_core/tools/ITool.md + - IChainTracer: src/swarmauri_core/tracing/IChainTracer.md + - ITraceContext: src/swarmauri_core/tracing/ITraceContext.md + - ITracer: src/swarmauri_core/tracing/ITracer.md + - ITransport: src/swarmauri_core/transports/ITransport.md + - ITransactional: src/swarmauri_core/utils/ITransactional.md + - IPredictVision: src/swarmauri_core/vcms/IPredictVision.md + - ICloudVectorStore: src/swarmauri_core/vector_stores/ICloudVectorStore.md + - IPersistentVectorStore: src/swarmauri_core/vector_stores/IPersistentVectorStore.md + - ISimilarity: src/swarmauri_core/vector_stores/ISimilarity.md + - IVectorNorm: src/swarmauri_core/vector_stores/IVectorNorm.md + - IVectorStore: src/swarmauri_core/vector_stores/IVectorStore.md + - IVectorStoreRetrieve: src/swarmauri_core/vector_stores/IVectorStoreRetrieve.md + - IVectorStoreSaveLoad: src/swarmauri_core/vector_stores/IVectorStoreSaveLoad.md + - IVector: src/swarmauri_core/vectors/IVector.md + - IVectorMeta: src/swarmauri_core/vectors/IVectorMeta.md + - 
IVectorProduct: src/swarmauri_core/vectors/IVectorProduct.md +- base: + - Home: src/swarmauri_base/index.md + - AgentBase: src/swarmauri_base/agents/AgentBase.md + - AgentConversationMixin: src/swarmauri_base/agents/AgentConversationMixin.md + - AgentRetrieveMixin: src/swarmauri_base/agents/AgentRetrieveMixin.md + - AgentSystemContextMixin: src/swarmauri_base/agents/AgentSystemContextMixin.md + - AgentToolMixin: src/swarmauri_base/agents/AgentToolMixin.md + - AgentVectorStoreMixin: src/swarmauri_base/agents/AgentVectorStoreMixin.md + - ChainBase: src/swarmauri_base/chains/ChainBase.md + - ChainContextBase: src/swarmauri_base/chains/ChainContextBase.md + - ChainStepBase: src/swarmauri_base/chains/ChainStepBase.md + - ChunkerBase: src/swarmauri_base/chunkers/ChunkerBase.md + - ControlPanelBase: src/swarmauri_base/control_panels/ControlPanelBase.md + - ConversationBase: src/swarmauri_base/conversations/ConversationBase.md + - ConversationSystemContextMixin: src/swarmauri_base/conversations/ConversationSystemContextMixin.md + - DataConnectorBase: src/swarmauri_base/dataconnectors/DataConnectorBase.md + - DistanceBase: src/swarmauri_base/distances/DistanceBase.md + - VisionDistanceBase: src/swarmauri_base/distances/VisionDistanceBase.md + - DocumentStoreBase: src/swarmauri_base/document_stores/DocumentStoreBase.md + - DocumentStoreRetrieveBase: src/swarmauri_base/document_stores/DocumentStoreRetrieveBase.md + - DocumentBase: src/swarmauri_base/documents/DocumentBase.md + - EmbeddingBase: src/swarmauri_base/embeddings/EmbeddingBase.md + - VisionEmbeddingBase: src/swarmauri_base/embeddings/VisionEmbeddingBase.md + - ImageGenBase: src/swarmauri_base/image_gens/ImageGenBase.md + - LLMBase: src/swarmauri_base/llms/LLMBase.md + - MeasurementAggregateMixin: src/swarmauri_base/measurements/MeasurementAggregateMixin.md + - MeasurementBase: src/swarmauri_base/measurements/MeasurementBase.md + - MeasurementCalculateMixin: 
src/swarmauri_base/measurements/MeasurementCalculateMixin.md + - MeasurementThresholdMixin: src/swarmauri_base/measurements/MeasurementThresholdMixin.md + - MessageBase: src/swarmauri_base/messages/MessageBase.md + - ParserBase: src/swarmauri_base/parsers/ParserBase.md + - PipelineBase: src/swarmauri_base/pipelines/PipelineBase.md + - PromptTemplateBase: src/swarmauri_base/prompt_templates/PromptTemplateBase.md + - PromptBase: src/swarmauri_base/prompts/PromptBase.md + - PromptGeneratorBase: src/swarmauri_base/prompts/PromptGeneratorBase.md + - PromptMatrixBase: src/swarmauri_base/prompts/PromptMatrixBase.md + - SchemaConverterBase: src/swarmauri_base/schema_converters/SchemaConverterBase.md + - ServiceRegistryBase: src/swarmauri_base/service_registries/ServiceRegistryBase.md + - StateBase: src/swarmauri_base/state/StateBase.md + - SwarmBase: src/swarmauri_base/swarms/SwarmBase.md + - SwarmStatus: src/swarmauri_base/swarms/SwarmStatus.md + - TaskMgmtStrategyBase: src/swarmauri_base/task_mgmt_strategies/TaskMgmtStrategyBase.md + - ToolkitBase: src/swarmauri_base/toolkits/ToolkitBase.md + - ParameterBase: src/swarmauri_base/tools/ParameterBase.md + - ToolBase: src/swarmauri_base/tools/ToolBase.md + - TransportBase: src/swarmauri_base/transports/TransportBase.md + - TransportProtocol: src/swarmauri_base/transports/TransportProtocol.md + - VectorStoreBase: src/swarmauri_base/vector_stores/VectorStoreBase.md + - VectorStoreCloudMixin: src/swarmauri_base/vector_stores/VectorStoreCloudMixin.md + - VectorStorePersistentMixin: src/swarmauri_base/vector_stores/VectorStorePersistentMixin.md + - VectorStoreRetrieveMixin: src/swarmauri_base/vector_stores/VectorStoreRetrieveMixin.md + - VectorStoreSaveLoadMixin: src/swarmauri_base/vector_stores/VectorStoreSaveLoadMixin.md + - VisionVectorStoreBase: src/swarmauri_base/vector_stores/VisionVectorStoreBase.md + - VectorBase: src/swarmauri_base/vectors/VectorBase.md diff --git a/docs/poetry.lock b/docs/poetry.lock new file mode 
100644 index 000000000..f197f05f1 --- /dev/null +++ b/docs/poetry.lock @@ -0,0 +1,889 @@ +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. + +[[package]] +name = "babel" +version = "2.17.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, +] + +[package.extras] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] + +[[package]] +name = "certifi" +version = "2025.1.31" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = 
"sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] + +[[package]] +name = "griffe" +version = "1.5.7" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "griffe-1.5.7-py3-none-any.whl", hash = "sha256:4af8ec834b64de954d447c7b6672426bb145e71605c74a4e22d510cc79fe7d8b"}, + {file = "griffe-1.5.7.tar.gz", hash = "sha256:465238c86deaf1137761f700fb343edd8ffc846d72f6de43c3c345ccdfbebe92"}, +] + +[package.dependencies] +colorama = ">=0.4" + +[[package]] +name = "griffe-typingdoc" +version = "0.2.8" +description = "Griffe extension for PEP 727 – Documentation Metadata in Typing." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "griffe_typingdoc-0.2.8-py3-none-any.whl", hash = "sha256:a4ed3dd73b9d48311b138d8b317916a0589325a73c525236bf5969a8fe2626b1"}, + {file = "griffe_typingdoc-0.2.8.tar.gz", hash = "sha256:36f2c2f2568240a5d0ab462153d1f3cfec01a9cc56b2291f16ce7869f0f7af05"}, +] + +[package.dependencies] +griffe = ">=0.49" +typing-extensions = ">=4.7" + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "jinja2" +version = "3.1.5" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markdown" +version = "3.7" +description = "Python implementation of John Gruber's Markdown." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, +] + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, 
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "mkdocs" +version = "1.6.1" +description = "Project documentation with Markdown." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, + {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +jinja2 = ">=2.11.1" +markdown = ">=3.3.6" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +mkdocs-get-deps = ">=0.2.0" +packaging = ">=20.5" +pathspec = ">=0.11.1" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", 
"watchdog (==2.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "1.3.1" +description = "Automatically link across pages in MkDocs." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "mkdocs_autorefs-1.3.1-py3-none-any.whl", hash = "sha256:18c504ae4d3ee7f344369bb26cb31d4105569ee252aab7d75ec2734c2c8b0474"}, + {file = "mkdocs_autorefs-1.3.1.tar.gz", hash = "sha256:a6d30cbcccae336d622a66c2418a3c92a8196b69782774529ad441abb23c0902"}, +] + +[package.dependencies] +Markdown = ">=3.3" +markupsafe = ">=2.0.1" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, + {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, +] + +[package.dependencies] +mergedeep = ">=1.3.4" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" + +[[package]] +name = "mkdocs-material" +version = "9.6.4" +description = "Documentation that simply works" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "mkdocs_material-9.6.4-py3-none-any.whl", hash = "sha256:414e8376551def6d644b8e6f77226022868532a792eb2c9accf52199009f568f"}, + {file = "mkdocs_material-9.6.4.tar.gz", hash = "sha256:4d1d35e1c1d3e15294cb7fa5d02e0abaee70d408f75027dc7be6e30fb32e6867"}, +] + +[package.dependencies] +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.6,<2.0" +mkdocs-material-extensions = ">=1.3,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<3)", 
"mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] + +[[package]] +name = "mkdocs-material-extensions" +version = "1.3.1" +description = "Extension pack for Python Markdown and MkDocs Material." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.28.1" +description = "Automatic documentation from sources, for MkDocs." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "mkdocstrings-0.28.1-py3-none-any.whl", hash = "sha256:a5878ae5cd1e26f491ff084c1f9ab995687d52d39a5c558e9b7023d0e4e0b740"}, + {file = "mkdocstrings-0.28.1.tar.gz", hash = "sha256:fb64576906771b7701e8e962fd90073650ff689e95eb86e86751a66d65ab4489"}, +] + +[package.dependencies] +Jinja2 = ">=2.11.1" +Markdown = ">=3.6" +MarkupSafe = ">=1.1" +mkdocs = ">=1.4" +mkdocs-autorefs = ">=1.3" +mkdocs-get-deps = ">=0.2" +mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} +pymdown-extensions = ">=6.3" + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.16.0" +description = "A Python handler for mkdocstrings." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "mkdocstrings_python-1.16.0-py3-none-any.whl", hash = "sha256:80ecbcca67fe29eb5a853ab7be0457231f6eefb5d079bcdf7dbae16962fc3f51"}, + {file = "mkdocstrings_python-1.16.0.tar.gz", hash = "sha256:546d53c7d559941abc726b2f78a41c0183480a95ba0a87ccc63b9b08740126f9"}, +] + +[package.dependencies] +griffe = ">=0.49" +mkdocs-autorefs = ">=1.2" +mkdocstrings = ">=0.28" + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "paginate" +version = "0.5.7" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, + {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, +] + +[package.extras] +dev = ["pytest", "tox"] +lint = ["black"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pymdown-extensions" +version = "10.14.3" +description = "Extension pack for Python Markdown." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pymdown_extensions-10.14.3-py3-none-any.whl", hash = "sha256:05e0bee73d64b9c71a4ae17c72abc2f700e8bc8403755a00580b49a4e9f189e9"}, + {file = "pymdown_extensions-10.14.3.tar.gz", hash = "sha256:41e576ce3f5d650be59e900e4ceff231e0aed2a88cf30acaee41e02f063a061b"}, +] + +[package.dependencies] +markdown = ">=3.6" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.19.1)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "regex" +version = "2024.11.6" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = 
"regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = 
"regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + 
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "watchdog" +version = "6.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, + 
{file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = 
"sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, + {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, + {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, + {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, + {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.12,<3.13" +content-hash = "ae3aa76e42d6e342ce7e07116a6218ba8aff51492134a66e4ee9d51df8e17fa1" diff --git a/docs/pyproject.toml b/docs/pyproject.toml new file mode 100644 index 000000000..03c38cea1 --- /dev/null +++ b/docs/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "swarmauri-docs" +version = "0.1.0" +description = "Swarmauri Documentation" +authors = ["Jacob Stewart "] +package-mode = false + +[tool.poetry.dependencies] +python = ">=3.12,<3.13" +mkdocs-material = "^9.6.4" +mkdocstrings = {version = "^0.28.1", extras=["python"]} +mkdocs-autorefs = "*" +griffe-typingdoc = "*" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/docs/scripts/generate_content.py b/docs/scripts/generate_content.py new file mode 100644 index 000000000..3a68844b7 --- /dev/null +++ 
b/docs/scripts/generate_content.py @@ -0,0 +1,206 @@ +import os +import pkgutil +import importlib +import inspect +import yaml + +HOME_PAGE_MD = "index.md" # The file name for your home page. + +def ensure_home_page(docs_dir: str): + """ + Ensure there is a docs/index.md for the Home page. + If it doesn't exist, create a minimal file. + """ + home_file_path = os.path.join(docs_dir, HOME_PAGE_MD) + if not os.path.exists(home_file_path): + os.makedirs(os.path.dirname(home_file_path), exist_ok=True) + with open(home_file_path, "w", encoding="utf-8") as f: + f.write("# Welcome\n\nThis is the home page.\n") + print(f"Created a new home page at {home_file_path}") + else: + print(f"Home page already exists at {home_file_path}") + +def generate_docs(package_name: str, output_dir: str) -> dict: + """ + Generate MkDocs-friendly Markdown files for each module and class in 'package_name', + storing them under 'output_dir'. Return a dict describing modules -> list of classes. + """ + os.makedirs(output_dir, exist_ok=True) + + # Attempt to import the package + try: + root_package = importlib.import_module(package_name) + except ImportError as e: + raise ImportError(f"Could not import package '{package_name}': {e}") + + # Ensure it's a proper package + if not hasattr(root_package, "__path__"): + raise ValueError(f"'{package_name}' is not a package or has no __path__ attribute.") + + package_path = root_package.__path__ + + # This will map "swarmauri_core.module_name" -> ["Class1", "Class2", ...] 
+ module_classes_map = {} + + for module_info in pkgutil.walk_packages(package_path, prefix=package_name + "."): + module_name = module_info.name + + try: + module = importlib.import_module(module_name) + except ImportError: + continue + + # Convert "swarmauri_core.some_module" -> "swarmauri_core/some_module.md" + relative_path = module_name.replace(".", "/") + doc_file_path = os.path.join(output_dir, f"{relative_path}.md") + os.makedirs(os.path.dirname(doc_file_path), exist_ok=True) + + # Gather all classes actually defined in this module + classes = [ + (cls_name, cls_obj) + for cls_name, cls_obj in inspect.getmembers(module, inspect.isclass) + if cls_obj.__module__ == module_name + ] + class_names = [cls_name for cls_name, _ in classes] + + # ---- Write the module Markdown file ---- + with open(doc_file_path, "w", encoding="utf-8") as md_file: + md_file.write(f"# Documentation for `{module_name}`\n\n") + md_file.write(f"::: {module_name}\n") + md_file.write(" options.extra:\n") + md_file.write(" show_submodules: false\n") + md_file.write(" show_inheritance: false\n") + # Exclude children so we don't double-document classes + md_file.write(" filters:\n") + md_file.write(" - '!.*' # exclude everything but the module docstring\n\n") + + if class_names: + md_file.write("## Classes\n\n") + for cls_name in class_names: + # Link to a separate class doc + md_file.write(f"- [`{cls_name}`]({cls_name}.md)\n") + md_file.write("\n") + + # ---- Write separate files for each class ---- + for cls_name, _ in classes: + class_file_path = os.path.join(os.path.dirname(doc_file_path), f"{cls_name}.md") + with open(class_file_path, "w", encoding="utf-8") as cls_md_file: + cls_md_file.write(f"# Class `{module_name}.{cls_name}`\n\n") + cls_md_file.write(f"::: {module_name}.{cls_name}\n") + cls_md_file.write(" options.extra:\n") + cls_md_file.write(" show_inheritance: true\n\n") + + module_classes_map[module_name] = class_names + + return module_classes_map + + +def build_nav( + 
package_name: str, + module_classes_map: dict, + docs_dir: str, + local_output_dir: str, + top_label: str = "core", + home_page: str = "index.md" +) -> list: + """ + Return a nav structure that looks like: + + nav: + - core: + - Home: index.md + - ClassOne: path/to/ClassOne.md + - ClassTwo: path/to/ClassTwo.md + ... + """ + # Sort the modules for stable output + sorted_modules = sorted(module_classes_map.keys()) + + # We'll build a single top-level list containing one dictionary: { core: [...] }. + # 1) Start with "Home" => index.md + core_items = [{"Home": os.path.join( + local_output_dir, + home_page + )}] + + # 2) For each module, add each class to the nav at the same level + for module_name in sorted_modules: + class_names = sorted(module_classes_map[module_name]) + for cls_name in class_names: + # E.g. "src/swarmauri_core/ComponentBase/ComponentBase.md" + if len(module_name.split('.')) > 2: + print(module_name) + module_name = '/'.join(module_name.split('.')[:2]) + class_md_path = os.path.join( + local_output_dir, + module_name, + f"{cls_name}.md" + ) + else: + module_name = '/'.join(module_name.split('.')[:1]) + class_md_path = os.path.join( + local_output_dir, + module_name, + f"{cls_name}.md" + ) + + core_items.append({cls_name: class_md_path}) + + # Wrap everything under the top_label (e.g. "core") + return [{top_label: core_items}] + +def write_nav_to_mkdocs_yml(mkdocs_yml_path: str, new_nav: list, replace_nav: bool = True): + """ + Load mkdocs.yml and either replace or append to the existing 'nav' key with new_nav. + :param replace_nav: If True, we replace the entire nav with 'new_nav'. + If False, we extend the existing nav by appending 'new_nav'. 
+ """ + if not os.path.isfile(mkdocs_yml_path): + raise FileNotFoundError(f"Could not find mkdocs.yml at '{mkdocs_yml_path}'") + + with open(mkdocs_yml_path, "r", encoding="utf-8") as f: + config = yaml.safe_load(f) + + if "nav" not in config: + config["nav"] = [] + + if replace_nav: + # Overwrite + config["nav"] = new_nav + else: + # Append + config["nav"].extend(new_nav) + + with open(mkdocs_yml_path, "w", encoding="utf-8") as f: + yaml.safe_dump(config, f, sort_keys=False) + + +def generate( + package_name: str, + docs_dir: str, + local_output_dir: str, + mkdocs_yml_path: str, + top_label: str = "core", + home_page: str = "index.md", + replace_nav: bool = False, +): + """ + 1) Ensure there's a Home: index.md + 2) Generate doc files for the given package. + 3) Build a nav structure under 'top_label' (e.g., "core"), with "Home" as the first item. + 4) Write that nav into mkdocs.yml (either replacing or appending). + """ + # Step 1: Ensure Home page + home_page_dir = os.path.join(docs_dir, package_name) + home_page_file_path = os.path.join(package_name, home_page) + ensure_home_page(home_page_dir) + + + # Step 2: Generate doc files + module_classes_map = generate_docs(package_name, docs_dir) + + # Step 3: Build a nav structure + new_nav_structure = build_nav(package_name, module_classes_map, docs_dir, local_output_dir, top_label, home_page_file_path) + + # Step 4: Write to mkdocs.yml + write_nav_to_mkdocs_yml(mkdocs_yml_path, new_nav_structure, replace_nav=replace_nav) diff --git a/pkgs/base/pyproject.toml b/pkgs/base/pyproject.toml index fa40c66e3..18d337a24 100644 --- a/pkgs/base/pyproject.toml +++ b/pkgs/base/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-base" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes base classes and mixins for the Swarmauri framework." 
authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/base/swarmauri_base/conversations/ConversationBase.py b/pkgs/base/swarmauri_base/conversations/ConversationBase.py index 61fcf38d0..c9cd6c37f 100644 --- a/pkgs/base/swarmauri_base/conversations/ConversationBase.py +++ b/pkgs/base/swarmauri_base/conversations/ConversationBase.py @@ -13,7 +13,7 @@ class ConversationBase(IConversation, ComponentBase): """ _history: List[SubclassUnion[MessageBase]] = PrivateAttr(default_factory=list) - resource: ResourceTypes = Field(default=ResourceTypes.CONVERSATION.value) + resource: ResourceTypes = Field(default=ResourceTypes.CONVERSATION) model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True) type: Literal["ConversationBase"] = "ConversationBase" diff --git a/pkgs/community/swarmauri_documentstore_redis/pyproject.toml b/pkgs/community/swarmauri_documentstore_redis/pyproject.toml index a67396aa0..eb5f8f111 100644 --- a/pkgs/community/swarmauri_documentstore_redis/pyproject.toml +++ b/pkgs/community/swarmauri_documentstore_redis/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_documentstore_redis" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Psutil Tool" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies redis = "^4.0" diff --git a/pkgs/community/swarmauri_embedding_mlm/pyproject.toml 
b/pkgs/community/swarmauri_embedding_mlm/pyproject.toml index da5df34ba..38e86722f 100644 --- a/pkgs/community/swarmauri_embedding_mlm/pyproject.toml +++ b/pkgs/community/swarmauri_embedding_mlm/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_embedding_mlm" -version = "0.6.1.dev9" +version = "0.6.1" description = "example community package" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } [tool.poetry.group.dev.dependencies] diff --git a/pkgs/community/swarmauri_llm_leptonai/pyproject.toml b/pkgs/community/swarmauri_llm_leptonai/pyproject.toml index f3e5967c8..5370ae8ee 100644 --- a/pkgs/community/swarmauri_llm_leptonai/pyproject.toml +++ b/pkgs/community/swarmauri_llm_leptonai/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_llm_leptonai" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Lepton AI Model" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { 
git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies #leptonai = "^0.22.0" diff --git a/pkgs/community/swarmauri_measurement_mutualinformation/pyproject.toml b/pkgs/community/swarmauri_measurement_mutualinformation/pyproject.toml index 33f50cf20..ef2576d98 100644 --- a/pkgs/community/swarmauri_measurement_mutualinformation/pyproject.toml +++ b/pkgs/community/swarmauri_measurement_mutualinformation/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_measurement_mutualinformation" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Mutual Information Measurement Community Package." authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/community/swarmauri_measurement_tokencountestimator/pyproject.toml b/pkgs/community/swarmauri_measurement_tokencountestimator/pyproject.toml index cbf3d3ebf..9eeaa6c35 100644 --- a/pkgs/community/swarmauri_measurement_tokencountestimator/pyproject.toml +++ b/pkgs/community/swarmauri_measurement_tokencountestimator/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_measurement_tokencountestimator" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes an example of a First Class Swarmauri Example." 
authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/community/swarmauri_ocr_pytesseract/pyproject.toml b/pkgs/community/swarmauri_ocr_pytesseract/pyproject.toml index 4e3bc3874..de96389eb 100644 --- a/pkgs/community/swarmauri_ocr_pytesseract/pyproject.toml +++ b/pkgs/community/swarmauri_ocr_pytesseract/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_ocr_pytesseract" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Tesseract Image to Text Model" authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/community/swarmauri_parser_bertembedding/pyproject.toml b/pkgs/community/swarmauri_parser_bertembedding/pyproject.toml index d14f31768..f2967ead8 100644 --- a/pkgs/community/swarmauri_parser_bertembedding/pyproject.toml +++ b/pkgs/community/swarmauri_parser_bertembedding/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_bertembedding" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Bert Embedding Parser" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies transformers = ">=4.45.0" diff --git a/pkgs/community/swarmauri_parser_entityrecognition/pyproject.toml b/pkgs/community/swarmauri_parser_entityrecognition/pyproject.toml index 82fb12d86..aef5e52a9 100644 --- a/pkgs/community/swarmauri_parser_entityrecognition/pyproject.toml +++ 
b/pkgs/community/swarmauri_parser_entityrecognition/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_entityrecognition" -version = "0.6.1.dev9" +version = "0.6.1" description = "Entity Recognition Parser for Swarmauri." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies spacy = ">=3.0.0,<=3.8.2" diff --git a/pkgs/community/swarmauri_parser_fitzpdf/pyproject.toml b/pkgs/community/swarmauri_parser_fitzpdf/pyproject.toml index 1e32f657f..21bbb0307 100644 --- a/pkgs/community/swarmauri_parser_fitzpdf/pyproject.toml +++ b/pkgs/community/swarmauri_parser_fitzpdf/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_fitzpdf" -version = "0.6.1.dev9" +version = "0.6.1" description = "Fitz PDF Parser for Swarmauri." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies PyMuPDF = "^1.24.12" diff --git a/pkgs/community/swarmauri_parser_fitzpdf/tests/unit/FitzPdfParser_test.py b/pkgs/community/swarmauri_parser_fitzpdf/tests/unit/FitzPdfParser_test.py index 3d62ea58e..47517d8f9 100644 --- a/pkgs/community/swarmauri_parser_fitzpdf/tests/unit/FitzPdfParser_test.py +++ b/pkgs/community/swarmauri_parser_fitzpdf/tests/unit/FitzPdfParser_test.py @@ -1,7 +1,7 @@ from unittest import mock import pytest -from swarmauri_parser_fitzpdf.FitzPdfParser import PDFtoTextParser as Parser +from swarmauri_parser_fitzpdf.FitzPdfParser import FitzPdfParser as Parser @pytest.mark.unit diff --git a/pkgs/community/swarmauri_parser_pypdf2/pyproject.toml b/pkgs/community/swarmauri_parser_pypdf2/pyproject.toml index 5fae22f68..e0a3129dd 100644 --- a/pkgs/community/swarmauri_parser_pypdf2/pyproject.toml +++ b/pkgs/community/swarmauri_parser_pypdf2/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_pypdf2" -version = "0.6.1.dev9" +version = "0.6.1" description = "PyPDF2 Parser for Swarmauri." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies PyPDF2 = "^3.0.1" diff --git a/pkgs/community/swarmauri_parser_pypdftk/pyproject.toml b/pkgs/community/swarmauri_parser_pypdftk/pyproject.toml index f19a06eb6..e2da30a00 100644 --- a/pkgs/community/swarmauri_parser_pypdftk/pyproject.toml +++ b/pkgs/community/swarmauri_parser_pypdftk/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_fitzpdf" -version = "0.6.1.dev9" +version = "0.6.1" description = "Fitz PDF Parser for Swarmauri." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies pypdftk = "^0.5" diff --git a/pkgs/community/swarmauri_parser_textblob/pyproject.toml b/pkgs/community/swarmauri_parser_textblob/pyproject.toml index d064c09d9..8ae81e1f2 100644 --- a/pkgs/community/swarmauri_parser_textblob/pyproject.toml +++ b/pkgs/community/swarmauri_parser_textblob/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_textblob" -version = "0.6.1.dev9" +version = "0.6.1" description = "TextBlob Parser for Swarmauri." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies textblob = "^0.18.0" diff --git a/pkgs/community/swarmauri_state_clipboard/pyproject.toml b/pkgs/community/swarmauri_state_clipboard/pyproject.toml index 1579b5ef8..6c0462894 100644 --- a/pkgs/community/swarmauri_state_clipboard/pyproject.toml +++ b/pkgs/community/swarmauri_state_clipboard/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_state_clipboard" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Community Clipboard State" authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/community/swarmauri_tool_captchagenerator/poetry.lock b/pkgs/community/swarmauri_tool_captchagenerator/poetry.lock index e5ff23866..c0e12411e 100644 --- a/pkgs/community/swarmauri_tool_captchagenerator/poetry.lock +++ b/pkgs/community/swarmauri_tool_captchagenerator/poetry.lock @@ -1081,7 +1081,7 @@ develop = false [package.dependencies] pydantic = "^2.0" -swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } [package.source] type = "git" @@ -1133,7 +1133,7 @@ Pillow = ">=8.0,<11.0" pydantic = "^2.9.2" requests = "^2.32.3" swarmauri_base = {git = 
"https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/base"} -swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } toml = "^0.10.2" typing_extensions = "*" diff --git a/pkgs/community/swarmauri_tool_captchagenerator/pyproject.toml b/pkgs/community/swarmauri_tool_captchagenerator/pyproject.toml index 860e76511..44174cb41 100644 --- a/pkgs/community/swarmauri_tool_captchagenerator/pyproject.toml +++ b/pkgs/community/swarmauri_tool_captchagenerator/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_captchagenerator" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Community Captcha Generator Tool" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # dependencies captcha = "^0.6.0" diff --git a/pkgs/community/swarmauri_tool_dalechallreadability/pyproject.toml b/pkgs/community/swarmauri_tool_dalechallreadability/pyproject.toml index be9d857c4..118a5f321 100644 --- a/pkgs/community/swarmauri_tool_dalechallreadability/pyproject.toml +++ b/pkgs/community/swarmauri_tool_dalechallreadability/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_dalechallreadability" -version = "0.6.1.dev9" 
+version = "0.6.1" description = "Swarmauri Community Dale-Chall Readability Tool" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # dependencies textstat = "^0.7.4" diff --git a/pkgs/community/swarmauri_tool_downloadpdf/pyproject.toml b/pkgs/community/swarmauri_tool_downloadpdf/pyproject.toml index 823838d98..64010dbe0 100644 --- a/pkgs/community/swarmauri_tool_downloadpdf/pyproject.toml +++ b/pkgs/community/swarmauri_tool_downloadpdf/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_downloadpdf" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Community Download PDF Tool" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } [tool.poetry.group.dev.dependencies] flake8 = "^7.0" diff --git 
a/pkgs/community/swarmauri_tool_entityrecognition/pyproject.toml b/pkgs/community/swarmauri_tool_entityrecognition/pyproject.toml index 4e58425cc..952c02a5f 100644 --- a/pkgs/community/swarmauri_tool_entityrecognition/pyproject.toml +++ b/pkgs/community/swarmauri_tool_entityrecognition/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_entityrecognition" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Community Entity Recognition Tool" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies transformers = ">=4.45.0" diff --git a/pkgs/community/swarmauri_tool_folium/pyproject.toml b/pkgs/community/swarmauri_tool_folium/pyproject.toml index f79549c37..21c94c89e 100644 --- a/pkgs/community/swarmauri_tool_folium/pyproject.toml +++ b/pkgs/community/swarmauri_tool_folium/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_folium" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes an example of a First Class Swarmauri Example." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies folium = "^0.18.0" diff --git a/pkgs/community/swarmauri_tool_gmail/pyproject.toml b/pkgs/community/swarmauri_tool_gmail/pyproject.toml index b237dbe66..8529c0571 100644 --- a/pkgs/community/swarmauri_tool_gmail/pyproject.toml +++ b/pkgs/community/swarmauri_tool_gmail/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_gmail" -version = "0.6.1.dev9" +version = "0.6.1" description = "example community package" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies google-api-python-client = "^2.157.0" diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/LICENSE b/pkgs/community/swarmauri_tool_jupyterclearoutput/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ 
b/pkgs/community/swarmauri_tool_jupyterclearoutput/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/README.md b/pkgs/community/swarmauri_tool_jupyterclearoutput/README.md new file mode 100644 index 000000000..975b9f2ce --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterclearoutput/README.md @@ -0,0 +1,174 @@ +![Swamauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterclearoutput +

+ +--- + +# swarmauri_tool_jupyterclearoutput + +JupyterClearOutputTool is a component designed for removing outputs from cells in a Jupyter Notebook. This ensures the notebook remains uncluttered, making it ideal for sharing and version control. It preserves the cell code and metadata, resets the execution counts, and logs the operation for auditing purposes, returning a cleaned notebook data structure. + +## Installation + +Install this package via PyPI: + + pip install swarmauri_tool_jupyterclearoutput + +This package requires Python 3.10 or newer. By installing swarmauri_tool_jupyterclearoutput, all additional dependencies (such as nbconvert, swarmauri_core, and swarmauri_base) will be installed automatically. + +## Usage + +After installation, import and instantiate JupyterClearOutputTool to clear cell outputs from an in-memory notebook. You can load your notebook into a Python dictionary (for example, using json.load on a .ipynb file) and pass that dictionary to the tool. + +Example usage: + +-------------------------------------------------------------------------------- +```python +from swarmauri_tool_jupyterclearoutput import JupyterClearOutputTool + +# Suppose 'notebook_data' is a dictionary representing a Jupyter Notebook (e.g., loaded from a .ipynb file) +notebook_data = { + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + {"output_type": "stream", "name": "stdout", "text": ["Hello World\n"]} + ], + "source": ["print('Hello World')"] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": ["# This is a markdown cell"] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} + +tool = JupyterClearOutputTool() +clean_notebook = tool(notebook_data) +``` +# At this point, 'clean_notebook' contains the same notebook but with outputs cleared. +# Each code cell's 'outputs' list is empty and the 'execution_count' is reset to None. 
+ +-------------------------------------------------------------------------------- + +You can then save the modified resulting dictionary back to a .ipynb file. This ensures the notebook is shared without potentially lengthy or sensitive outputs included. + +## Dependencies + +This package relies on: +• Python 3.10 or higher +• swarmauri_core +• swarmauri_base +• nbconvert + +These dependencies are automatically managed by the package installer. No manual installation steps beyond "pip install swarmauri_tool_jupyterclearoutput" are required. + +## Example Code Implementation + +Below is the fully functional implementation for the core tool code: + +-------------------------------------------------------------------------------- +```python +""" +JupyterClearOutputTool.py + +This module defines the JupyterClearOutputTool, a component that removes all outputs from a +Jupyter notebook while preserving cell code and metadata. It handles notebooks of varying +sizes and versions efficiently, logs the clear operation for auditing, and returns a clean +NotebookNode for further use. +""" + +import logging +from typing import List, Dict, Any, Literal +from pydantic import Field +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterClearOutputTool") +class JupyterClearOutputTool(ToolBase): + """ + JupyterClearOutputTool is a tool that removes the outputs from code cells in a Jupyter notebook. + It preserves the cell code and metadata, ensures compatibility with various notebook versions, + and returns a cleaned notebook data structure for further use. + + Attributes: + version (str): The version of the JupyterClearOutputTool. + parameters (List[Parameter]): A list of parameters required for clearing notebook outputs. + name (str): The name of the tool. 
+ description (str): A brief description of the tool's functionality. + type (Literal["JupyterClearOutputTool"]): The type identifier for this tool. + """ + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_data", + type="object", + description="A dictionary that represents the Jupyter Notebook to clear outputs from.", + required=True, + ), + ] + ) + name: str = "JupyterClearOutputTool" + description: str = "Removes outputs from a Jupyter notebook while preserving code and metadata." + type: Literal["JupyterClearOutputTool"] = "JupyterClearOutputTool" + + def __call__(self, notebook_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Removes all outputs from the provided Jupyter notebook data structure. Preserves + cell code and metadata, and resets the execution counts. Logs the operation for auditing + and returns the cleaned notebook. + + Args: + notebook_data (Dict[str, Any]): A dictionary representing the Jupyter Notebook. + + Returns: + Dict[str, Any]: The cleaned Jupyter Notebook dictionary with all cell outputs removed. + + Example: + >>> tool = JupyterClearOutputTool() + >>> clean_notebook = tool(notebook_data) + """ + cells_cleared = 0 + + # Iterate over all cells in the notebook and remove their outputs if they are code cells. + for cell in notebook_data.get("cells", []): + if cell.get("cell_type") == "code": + if "outputs" in cell: + cell["outputs"] = [] + cell["execution_count"] = None + cells_cleared += 1 + + # Log the number of cells cleared for auditing. + logger.info("Cleared outputs from %d cells in the notebook.", cells_cleared) + + # Return the cleaned notebook data structure. + return notebook_data +``` +-------------------------------------------------------------------------------- + +## License + +This project is licensed under the Apache-2.0 License. For additional details, refer to the LICENSE file (if available). 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/payload.json b/pkgs/community/swarmauri_tool_jupyterclearoutput/payload.json new file mode 100644 index 000000000..7c4fceb8b --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterclearoutput/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to clear all outputs from a Jupyter Notebook using nbconvert\u2019s ClearOutputPreprocessor, preparing the notebook for sharing or version control.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterclearoutput", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterClearOutputTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Remove all cell outputs from the notebook.", + "Preserve cell code and metadata.", + "Log the clear operation for auditing.", + "Handle notebooks with large outputs efficiently.", + "Ensure compatibility with various notebook versions.", + "Return a clean NotebookNode for further use." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbconvert", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterclearoutput/pyproject.toml new file mode 100644 index 000000000..ae50cedc8 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterclearoutput/pyproject.toml @@ -0,0 +1,55 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterclearoutput" +version = "0.6.1" +description = "A tool designed to clear all outputs from a Jupyter Notebook using nbconvert’s ClearOutputPreprocessor, preparing the notebook for sharing or version control." 
+authors = ["Jacob Stewart "]
+license = "Apache-2.0"
+readme = "README.md"
+repository = "https://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterclearoutput/"
+classifiers = [
+    "License :: OSI Approved :: Apache Software License",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12"
+]
+
+[tool.poetry.dependencies]
+python = ">=3.10,<3.13"
+
+# Swarmauri dependencies
+swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" }
+swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" }
+swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" }
+# External dependencies
+nbconvert = "^7.16.6"
+
+[tool.poetry.group.dev.dependencies]
+flake8 = "^7.0"
+pytest = "^8.0"
+pytest-asyncio = ">=0.24.0"
+pytest-xdist = "^3.6.1"
+pytest-json-report = "^1.5.0"
+python-dotenv = "*"
+requests = "^2.32.3"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.pytest.ini_options]
+markers = [
+    "test: standard test",
+    "unit: Unit tests",
+    "i9n: Integration tests",
+    "acceptance: Acceptance tests",
+    "experimental: Experimental tests"
+]
+log_cli = true
+log_cli_level = "INFO"
+log_cli_format = "%(asctime)s [%(levelname)s] %(message)s"
+log_cli_date_format = "%Y-%m-%d %H:%M:%S"
+asyncio_default_fixture_loop_scope = "function"
+
+[tool.poetry.plugins."swarmauri.tools"]
+jupyterclearoutputtool = "swarmauri_tool_jupyterclearoutput:JupyterClearOutputTool"
\ No newline at end of file
diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/swarmauri_tool_jupyterclearoutput/JupyterClearOutputTool.py
b/pkgs/community/swarmauri_tool_jupyterclearoutput/swarmauri_tool_jupyterclearoutput/JupyterClearOutputTool.py new file mode 100644 index 000000000..bd082ae81 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterclearoutput/swarmauri_tool_jupyterclearoutput/JupyterClearOutputTool.py @@ -0,0 +1,82 @@ +""" +JupyterClearOutputTool.py + +This module defines the JupyterClearOutputTool, a component that removes all outputs from a +Jupyter notebook while preserving cell code and metadata. It handles notebooks of varying +sizes and versions efficiently, logs the clear operation for auditing, and returns a clean +NotebookNode for further use. +""" + +import logging +from typing import List, Dict, Any, Literal +from pydantic import Field +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterClearOutputTool") +class JupyterClearOutputTool(ToolBase): + """ + JupyterClearOutputTool is a tool that removes the outputs from code cells in a Jupyter notebook. + It preserves the cell code and metadata, ensures compatibility with various notebook versions, + and returns a cleaned notebook data structure for further use. + + Attributes: + version (str): The version of the JupyterClearOutputTool. + parameters (List[Parameter]): A list of parameters required for clearing notebook outputs. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterClearOutputTool"]): The type identifier for this tool. 
+ """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_data", + type="object", + description="A dictionary that represents the Jupyter Notebook to clear outputs from.", + required=True, + ), + ] + ) + name: str = "JupyterClearOutputTool" + description: str = ( + "Removes outputs from a Jupyter notebook while preserving code and metadata." + ) + type: Literal["JupyterClearOutputTool"] = "JupyterClearOutputTool" + + def __call__(self, notebook_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Removes all outputs from the provided Jupyter notebook data structure. Preserves + cell code and metadata, and resets the execution counts. Logs the operation for auditing + and returns the cleaned notebook. + + Args: + notebook_data (Dict[str, Any]): A dictionary representing the Jupyter Notebook. + + Returns: + Dict[str, Any]: The cleaned Jupyter Notebook dictionary with all cell outputs removed. + + Example: + >>> tool = JupyterClearOutputTool() + >>> clean_notebook = tool(notebook_data) + """ + cells_cleared = 0 + + # Iterate over all cells in the notebook and remove their outputs if they are code cells. + for cell in notebook_data.get("cells", []): + if cell.get("cell_type") == "code": + if "outputs" in cell: + cell["outputs"] = [] + cell["execution_count"] = None + cells_cleared += 1 + + # Log the number of cells cleared for auditing. + logger.info("Cleared outputs from %d cells in the notebook.", cells_cleared) + + # Return the cleaned notebook data structure. 
+ return notebook_data diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/swarmauri_tool_jupyterclearoutput/__init__.py b/pkgs/community/swarmauri_tool_jupyterclearoutput/swarmauri_tool_jupyterclearoutput/__init__.py new file mode 100644 index 000000000..03d63e9f8 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterclearoutput/swarmauri_tool_jupyterclearoutput/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterclearoutput.JupyterClearOutputTool import ( + JupyterClearOutputTool, +) + + +__all__ = ["JupyterClearOutputTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterclearoutput") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/tests/unit/test_JupyterClearOutputTool.py b/pkgs/community/swarmauri_tool_jupyterclearoutput/tests/unit/test_JupyterClearOutputTool.py new file mode 100644 index 000000000..ae3db2eb0 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterclearoutput/tests/unit/test_JupyterClearOutputTool.py @@ -0,0 +1,166 @@ +""" +test_JupyterClearOutputTool.py + +This module provides test coverage for the JupyterClearOutputTool, ensuring it properly clears +outputs from Jupyter notebooks and adheres to its defined interface. +""" + +import pytest +from typing import Dict, Any + +from swarmauri_tool_jupyterclearoutput.JupyterClearOutputTool import ( + JupyterClearOutputTool, +) + + +@pytest.fixture +def sample_notebook() -> Dict[str, Any]: + """ + Returns a sample Jupyter notebook structure containing both code and markdown cells, + with some predefined outputs in the code cells. 
+ """ + return { + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [{"output_type": "stream", "text": "Hello World\n"}], + "source": ["print('Hello World')"], + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": ["# This is a markdown cell"], + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + {"output_type": "execute_result", "data": {"text/plain": "2"}} + ], + "source": ["2"], + }, + ], + "metadata": {"language_info": {"name": "python"}}, + "nbformat": 4, + "nbformat_minor": 5, + } + + +def test_tool_instantiation() -> None: + """ + Tests whether the JupyterClearOutputTool can be instantiated with its default parameters. + """ + tool = JupyterClearOutputTool() + assert tool is not None, "Tool instantiation failed." + + +def test_tool_attributes() -> None: + """ + Checks the default attribute values of the JupyterClearOutputTool. + """ + tool = JupyterClearOutputTool() + assert tool.version == "1.0.0", "Version does not match expected default." + assert tool.name == "JupyterClearOutputTool", ( + "Name does not match expected default." + ) + assert tool.type == "JupyterClearOutputTool", ( + "Type does not match expected default." + ) + assert len(tool.parameters) > 0, "Tool parameters should not be empty." + + +def test_clearing_outputs(sample_notebook: Dict[str, Any]) -> None: + """ + Verifies that the tool removes outputs and resets execution counts from all code cells + while preserving the original cell code and metadata. + """ + tool = JupyterClearOutputTool() + cleaned_notebook = tool(sample_notebook) + + # Check that code cells have had their outputs cleared. + code_cells = [ + cell + for cell in cleaned_notebook.get("cells", []) + if cell.get("cell_type") == "code" + ] + for cell in code_cells: + assert cell.get("outputs") == [], "Outputs were not cleared from a code cell." + assert cell.get("execution_count") is None, "Execution count was not reset." 
+ + # Verify that markdown cells remain unchanged. + markdown_cells = [ + cell + for cell in cleaned_notebook.get("cells", []) + if cell.get("cell_type") == "markdown" + ] + assert len(markdown_cells) == 1, "Unexpected number of markdown cells." + original_markdown_cell = sample_notebook["cells"][1] + updated_markdown_cell = markdown_cells[0] + assert updated_markdown_cell["source"] == original_markdown_cell["source"], ( + "Markdown cell content was unexpectedly modified." + ) + + +def test_clearing_with_no_cells() -> None: + """ + Ensures that the tool handles a notebook with no cells without errors. + The notebook dict should remain structurally the same, minus any alterations to cells. + """ + tool = JupyterClearOutputTool() + empty_notebook = {"cells": [], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} + cleaned_notebook = tool(empty_notebook) + + assert "cells" in cleaned_notebook, "Cleaned notebook is missing the 'cells' key." + assert len(cleaned_notebook["cells"]) == 0, "Cells should remain empty." + assert cleaned_notebook["metadata"] == {}, "Metadata should remain unchanged." + + +def test_clearing_with_only_markdown_cells() -> None: + """ + Validates correct behavior when the notebook has only markdown cells. + No outputs should be cleared since there are no code cells. + """ + tool = JupyterClearOutputTool() + markdown_only_notebook = { + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": ["# Just a heading in markdown"], + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": ["Some more markdown content."], + }, + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5, + } + cleaned_notebook = tool(markdown_only_notebook) + + for cell in cleaned_notebook["cells"]: + assert cell["cell_type"] == "markdown", "Cell type should remain markdown." + assert "outputs" not in cell, "Markdown cells should not contain any outputs." + assert cell["source"] is not None, "Markdown cell content should be preserved." 
+ + +def test_parameters_structure() -> None: + """ + Ensures that the tool's parameters are properly defined and contain the required fields. + """ + tool = JupyterClearOutputTool() + assert len(tool.parameters) == 1, "There should be exactly one parameter defined." + param = tool.parameters[0] + assert param.name == "notebook_data", ( + "Parameter name does not match expected value." + ) + assert param.type == "object", "Parameter type does not match expected value." + assert param.required, "Parameter should be required." + assert "A dictionary that represents" in param.description, ( + "Parameter description is missing or incomplete." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterclearoutput/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterclearoutput/tests/unit/test___init__.py new file mode 100644 index 000000000..7dee91599 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterclearoutput/tests/unit/test___init__.py @@ -0,0 +1,38 @@ +""" +Unit tests for the swarmauri_tool_jupyterclearoutput package initialization. + +This module uses pytest to verify that the package's __init__.py file +correctly exposes and imports required classes and variables. +It also ensures that the exposed objects behave as expected. +""" + +from swarmauri_tool_jupyterclearoutput import JupyterClearOutputTool, __version__ + + +class BaseTest: + """ + Base class for all test classes in this module. + This can be extended to include shared setup or teardown logic for tests. + """ + + +class TestSwarmAuriToolsInit(BaseTest): + """ + Test suite for swarmauri_tool_jupyterclearoutput package initialization. + Ensures the correctness of imports and exposed objects within __init__.py. + """ + + def test_jupyter_clear_output_tool_import(self) -> None: + """ + Test if JupyterClearOutputTool is imported correctly and can be instantiated. + """ + tool_instance = JupyterClearOutputTool() + assert tool_instance is not None, "JupyterClearOutputTool instantiation failed." 
+ + def test_version_availability(self) -> None: + """ + Test if __version__ is defined and is a non-empty string. + """ + assert __version__ is not None, "Expected __version__ to be defined." + assert isinstance(__version__, str), "Expected __version__ to be a string." + assert __version__ != "", "Expected __version__ to be a non-empty string." diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/LICENSE b/pkgs/community/swarmauri_tool_jupyterdisplay/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/README.md b/pkgs/community/swarmauri_tool_jupyterdisplay/README.md new file mode 100644 index 000000000..3355a9144 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/README.md @@ -0,0 +1,96 @@ +""" +JupyterDisplayTool.py + +This module defines the JupyterDisplayTool, a component that leverages IPython display +functionality to render data with a variety of rich representations. It inherits from +ToolBase and integrates with the swarmauri framework's tool architecture. +""" + +import logging +from typing import List, Dict, Literal +from pydantic import Field +from IPython.display import display, HTML, Image, Latex, Markdown + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + + +@ComponentBase.register_type(ToolBase, 'JupyterDisplayTool') +class JupyterDisplayTool(ToolBase): + """ + JupyterDisplayTool is a tool that displays data in a Jupyter environment using IPython's + rich display capabilities. It supports multiple data formats, including plain text, HTML, + images, and LaTeX. + + Attributes: + version (str): The version of the JupyterDisplayTool. + parameters (List[Parameter]): A list of parameters defining the expected inputs. + name (str): The name of the tool. 
+ description (str): A brief description of the tool's functionality. + type (Literal["JupyterDisplayTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="data", + type="string", + description="The data to be displayed. Accepts text, HTML, image paths, or LaTeX content.", + required=True + ), + Parameter( + name="data_format", + type="string", + description="The format of the data ('auto', 'text', 'html', 'image', or 'latex').", + required=False, + default="auto", + enum=["auto", "text", "html", "image", "latex"] + ), + ] + ) + name: str = "JupyterDisplayTool" + description: str = "Displays data in a Jupyter environment using IPython's rich display capabilities." + type: Literal["JupyterDisplayTool"] = "JupyterDisplayTool" + + def __call__(self, data: str, data_format: str = "auto") -> Dict[str, str]: + """ + Renders the provided data in the Jupyter environment using IPython's display. + + Args: + data (str): The data to be displayed. Could be text, HTML, a path to an image, or LaTeX. + data_format (str, optional): The format of the data. Defaults to 'auto'. Supported + values are 'text', 'html', 'image', and 'latex'. + + Returns: + Dict[str, str]: A dictionary containing the status of the operation ("success" or "error") + and a corresponding message. 
+ + Example: + >>> display_tool = JupyterDisplayTool() + >>> display_tool("Hello, world!", "html") + {'status': 'success', 'message': 'Data displayed successfully.'} + """ + logger = logging.getLogger(__name__) + logger.debug("Attempting to display data with data_format=%s", data_format) + + try: + if data_format == "html": + display(HTML(data)) + elif data_format == "latex": + display(Latex(data)) + elif data_format == "image": + display(Image(data)) + elif data_format == "text": + display(Markdown(data)) + else: + display(Markdown(data)) + + logger.debug("Data displayed successfully.") + return {"status": "success", "message": "Data displayed successfully."} + + except Exception as e: + error_message = f"Error displaying data: {str(e)}" + logger.error(error_message) + return {"status": "error", "message": error_message} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/payload.json b/pkgs/community/swarmauri_tool_jupyterdisplay/payload.json new file mode 100644 index 000000000..914268936 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to display rich media and object representations in a Jupyter Notebook using IPython's display functionality.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterdisplay", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterDisplayTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Render objects with rich representations (HTML, images, LaTeX).", + "Utilize IPython's display method for output.", + "Support multiple data types for display.", + "Log display operations for debugging.", + "Handle errors in rendering gracefully.", + "Return confirmation of successful display rendering." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "IPython", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterdisplay/pyproject.toml new file mode 100644 index 000000000..74d069f03 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/pyproject.toml @@ -0,0 +1,58 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterdisplay" +version = "0.6.1" +description = "A tool designed to display rich media and object representations in a Jupyter Notebook using IPython's display functionality." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterdisplay/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +IPython = "^8.32.0" + + + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit 
tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterdisplaytool = "swarmauri_tool_jupyterdisplay:JupyterDisplayTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/swarmauri_tool_jupyterdisplay/JupyterDisplayTool.py b/pkgs/community/swarmauri_tool_jupyterdisplay/swarmauri_tool_jupyterdisplay/JupyterDisplayTool.py new file mode 100644 index 000000000..3b11e120c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/swarmauri_tool_jupyterdisplay/JupyterDisplayTool.py @@ -0,0 +1,98 @@ +""" +JupyterDisplayTool.py + +This module defines the JupyterDisplayTool, a component that leverages IPython display +functionality to render data with a variety of rich representations. It inherits from +ToolBase and integrates with the swarmauri framework's tool architecture. +""" + +import logging +from typing import List, Dict, Literal +from pydantic import Field +import IPython.display as ipyd # Updated import to use namespace + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + + +@ComponentBase.register_type(ToolBase, "JupyterDisplayTool") +class JupyterDisplayTool(ToolBase): + """ + JupyterDisplayTool is a tool that displays data in a Jupyter environment using IPython's + rich display capabilities. It supports multiple data formats, including plain text, HTML, + images, and LaTeX. + + Attributes: + version (str): The version of the JupyterDisplayTool. + parameters (List[Parameter]): A list of parameters defining the expected inputs. + name (str): The name of the tool. 
+ description (str): A brief description of the tool's functionality. + type (Literal["JupyterDisplayTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="data", + type="string", + description="The data to be displayed. Accepts text, HTML, image paths, or LaTeX content.", + required=True, + ), + Parameter( + name="data_format", + type="string", + description="The format of the data ('auto', 'text', 'html', 'image', or 'latex').", + required=False, + default="auto", + enum=["auto", "text", "html", "image", "latex"], + ), + ] + ) + name: str = "JupyterDisplayTool" + description: str = "Displays data in a Jupyter environment using IPython's rich display capabilities." + type: Literal["JupyterDisplayTool"] = "JupyterDisplayTool" + + def __call__(self, data: str, data_format: str = "auto") -> Dict[str, str]: + """ + Renders the provided data in the Jupyter environment using IPython's display. + + Args: + data (str): The data to be displayed. Could be text, HTML, a path to an image, or LaTeX. + data_format (str, optional): The format of the data. Defaults to 'auto'. Supported + values are 'text', 'html', 'image', and 'latex'. + + Returns: + Dict[str, str]: A dictionary containing the status of the operation ("success" or "error") + and a corresponding message. + + Example: + >>> display_tool = JupyterDisplayTool() + >>> display_tool("Hello, world!", "html") + {'status': 'success', 'message': 'Data displayed successfully.'} + """ + logger = logging.getLogger(__name__) + logger.debug("Attempting to display data with data_format=%s", data_format) + + try: + if data_format == "html": + ipyd.display(ipyd.HTML(data)) + elif data_format == "latex": + ipyd.display(ipyd.Latex(data)) + elif data_format == "image": + # If data is a path to an image, display it. Otherwise, it may fail. 
+ ipyd.display(ipyd.Image(data)) + elif data_format == "text": + ipyd.display(ipyd.Markdown(data)) + else: + # 'auto' or anything else defaults to treating the data as text + ipyd.display(ipyd.Markdown(data)) + + logger.debug("Data displayed successfully.") + return {"status": "success", "message": "Data displayed successfully."} + + except Exception as e: + error_message = f"Error displaying data: {str(e)}" + logger.error(error_message) + return {"status": "error", "message": error_message} diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/swarmauri_tool_jupyterdisplay/__init__.py b/pkgs/community/swarmauri_tool_jupyterdisplay/swarmauri_tool_jupyterdisplay/__init__.py new file mode 100644 index 000000000..4ac756972 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/swarmauri_tool_jupyterdisplay/__init__.py @@ -0,0 +1,17 @@ +from swarmauri_tool_jupyterdisplay.JupyterDisplayTool import JupyterDisplayTool + + +__all__ = ["JupyterDisplayTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterdisplay") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/tests/unit/test_JupyterDisplayTool.py b/pkgs/community/swarmauri_tool_jupyterdisplay/tests/unit/test_JupyterDisplayTool.py new file mode 100644 index 000000000..55d78565c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/tests/unit/test_JupyterDisplayTool.py @@ -0,0 +1,123 @@ +import pytest +from unittest.mock import patch, MagicMock +from swarmauri_tool_jupyterdisplay.JupyterDisplayTool import JupyterDisplayTool + + +@pytest.fixture +def jupyter_display_tool() -> JupyterDisplayTool: + """ + Pytest fixture to instantiate a 
JupyterDisplayTool object. + + Returns: + JupyterDisplayTool: A new instance of JupyterDisplayTool. + """ + return JupyterDisplayTool() + + +def test_jupyter_display_tool_instantiate( + jupyter_display_tool: JupyterDisplayTool, +) -> None: + """ + Tests basic instantiation of JupyterDisplayTool and checks + that the default attributes match expected values. + """ + assert jupyter_display_tool.name == "JupyterDisplayTool" + assert jupyter_display_tool.version == "1.0.0" + assert jupyter_display_tool.type == "JupyterDisplayTool" + assert len(jupyter_display_tool.parameters) == 2 + + +@patch("IPython.display.display") +def test_jupyter_display_tool_call_text( + mock_display: MagicMock, jupyter_display_tool: JupyterDisplayTool +) -> None: + """ + Tests calling the JupyterDisplayTool with plain text data_format. + Verifies that the response includes a success status and that the display function is called. + """ + data = "Hello, world!" + response = jupyter_display_tool(data, data_format="text") + + assert response["status"] == "success" + assert "successfully" in response["message"].lower() + mock_display.assert_called_once() + + +@patch("IPython.display.display") +def test_jupyter_display_tool_call_html( + mock_display: MagicMock, jupyter_display_tool: JupyterDisplayTool +) -> None: + """ + Tests calling the JupyterDisplayTool with HTML data_format. + Verifies that the response includes a success status and that the display function is called. + """ + data = "Hello, HTML!" + response = jupyter_display_tool(data, data_format="html") + + assert response["status"] == "success" + assert "successfully" in response["message"].lower() + mock_display.assert_called_once() + + +@patch("IPython.display.display") +def test_jupyter_display_tool_call_latex( + mock_display: MagicMock, jupyter_display_tool: JupyterDisplayTool +) -> None: + """ + Tests calling the JupyterDisplayTool with LaTeX data_format. 
+ Verifies that the response includes a success status and that the display function is called. + """ + data = r"\frac{1}{2} \text{ is a fraction.}" + response = jupyter_display_tool(data, data_format="latex") + + assert response["status"] == "success" + assert "successfully" in response["message"].lower() + mock_display.assert_called_once() + + +@patch("IPython.display.display") +def test_jupyter_display_tool_call_image( + mock_display: MagicMock, jupyter_display_tool: JupyterDisplayTool +) -> None: + """ + Tests calling the JupyterDisplayTool with image data_format. + Verifies that the response includes a success status and that the display function is called. + """ + data = "test_image.png" + response = jupyter_display_tool(data, data_format="image") + + assert response["status"] == "success" + assert "successfully" in response["message"].lower() + mock_display.assert_called_once() + + +@patch("IPython.display.display") +def test_jupyter_display_tool_call_auto( + mock_display: MagicMock, jupyter_display_tool: JupyterDisplayTool +) -> None: + """ + Tests calling the JupyterDisplayTool with the default (auto) data_format. + In this scenario, it should treat the data as text and display it accordingly. + """ + data = "Auto-detected text content." + response = jupyter_display_tool(data) + + assert response["status"] == "success" + assert "successfully" in response["message"].lower() + mock_display.assert_called_once() + + +@patch("IPython.display.display") +def test_jupyter_display_tool_call_error( + mock_display: MagicMock, jupyter_display_tool: JupyterDisplayTool +) -> None: + """ + Tests error handling in the JupyterDisplayTool by causing an exception to be raised + during display. Verifies that an error response is returned. + """ + mock_display.side_effect = Exception("Display function error") + data = "This will cause an exception." 
+ response = jupyter_display_tool(data, data_format="text") + + assert response["status"] == "error" + assert "error" in response["message"].lower() diff --git a/pkgs/community/swarmauri_tool_jupyterdisplay/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterdisplay/tests/unit/test___init__.py new file mode 100644 index 000000000..9bb6bdbb8 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplay/tests/unit/test___init__.py @@ -0,0 +1,30 @@ +""" +Module containing unit tests for the swarmauri_tool_jupyterdisplay package initialization. + +The tests in this file ensure that all modules, classes, and attributes are correctly +exposed at the package level, including verifying the presence of JupyterDisplayTool and +the package's version string. +""" + +from swarmauri_tool_jupyterdisplay import JupyterDisplayTool, __version__ + + +class TestPackageInit: + """Test suite for the swarmauri_tool_jupyterdisplay package initialization.""" + + def test_jupyter_display_tool_exposed(self) -> None: + """ + Test that JupyterDisplayTool is exposed at the package level. + """ + assert JupyterDisplayTool is not None, ( + "Expected JupyterDisplayTool to be exposed by the package." + ) + + def test_package_version_exposed(self) -> None: + """ + Test that the package version is available in __version__. + """ + # Check that __version__ is a non-empty string + assert isinstance(__version__, str) and len(__version__) > 0, ( + "Expected a non-empty string for __version__." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterdisplayhtml/LICENSE b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterdisplayhtml/README.md b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/README.md new file mode 100644 index 000000000..b4e6b11c5 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/README.md @@ -0,0 +1,84 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterdisplayhtml +

+ +--- + +# swarmauri_tool_jupyterdisplayhtml + +A tool designed to render HTML content within a Jupyter Notebook using IPython's HTML display method. + +## Installation + +1. Ensure you have Python 3.10 or newer installed. +2. Install from PyPI using your preferred package manager: + + • pip: + pip install swarmauri_tool_jupyterdisplayhtml + + • Poetry: + poetry add swarmauri_tool_jupyterdisplayhtml + +This will pull down all required dependencies, including IPython for HTML display capabilities, and the Swarmauri Core/Base libraries for tool interaction. + +## Usage + +Once installed, import the JupyterDisplayHTMLTool class and invoke it to render HTML content in a Jupyter cell. Here is a simple example to get you started: + +------------------------------------------------------------------------------------------- +Example usage: + +from swarmauri_tool_jupyterdisplayhtml import JupyterDisplayHTMLTool + +def main(): + # Instantiate the tool + display_tool = JupyterDisplayHTMLTool() + + # Sample HTML content + html_snippet = """ +

Hello from Swarmauri!

+

This content is displayed using JupyterDisplayHTMLTool.

+ """ + + # Call the tool with the HTML content + result = display_tool(html_snippet) + + # The tool returns a dictionary with status and message + print(f"Status: {result['status']}") + print(f"Message: {result['message']}") + +if __name__ == "__main__": + main() +------------------------------------------------------------------------------------------- + +Running this script in a Jupyter Notebook cell will display the HTML heading and paragraph above the cell's output. The command line output after invocation will confirm whether the display was successful or if an error occurred. + +## Extended Options + +• Dynamic Updates: You can instantiate the tool once and call it multiple times with different HTML fragments to display updated content in different cells. +• Integration with Other Tools: Because JupyterDisplayHTMLTool inherits from ToolBase, it integrates cleanly with other Swarmauri-based tools and workflows. +• Error Handling: If an error occurs while rendering HTML, the returned dictionary will have "status" = "error" and a "message" describing the issue. + +## Dependencies + +• swarmauri_core (>=0.6.0.dev1): Provides core mechanics and decorators for registering this tool. +• swarmauri_base (>=0.6.0.dev1): Supplies the base ToolBase class. +• IPython: Used to display HTML content in a Jupyter environment. + +For detailed version requirements, see the "pyproject.toml" file in this project. The code is written following PEP 8 guidelines, uses type hints, and includes docstrings to clarify functionality at every class and method level. Additional logs and comments assist in understanding critical points of the implementation. + +--- + +Use this package to effortlessly render HTML content in a Jupyter environment and integrate the display process within your broader Swarmauri-based ecosystem. 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterdisplayhtml/payload.json b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/payload.json new file mode 100644 index 000000000..09814b8dd --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to render HTML content within a Jupyter Notebook using IPython's HTML display method.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterdisplayhtml", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterDisplayHTMLTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Render a string of HTML within a notebook cell.", + "Support dynamic HTML content updates.", + "Log display actions and errors.", + "Integrate with other visualization tools.", + "Handle malformed HTML gracefully.", + "Return the rendered HTML output confirmation." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "IPython", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterdisplayhtml/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/pyproject.toml new file mode 100644 index 000000000..32da42035 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterdisplayhtml" +version = "0.6.1" +description = "A tool designed to render HTML content within a Jupyter Notebook using IPython's HTML display method." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/swarmauri/swarmauri-sdk" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri dependencies +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Other dependencies +IPython = "^8.32.0" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterdisplayhtmltool = "swarmauri_tool_jupyterdisplayhtml:JupyterDisplayHTMLTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterdisplayhtml/swarmauri_tool_jupyterdisplayhtml/JupyterDisplayHTMLTool.py b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/swarmauri_tool_jupyterdisplayhtml/JupyterDisplayHTMLTool.py new file mode 100644 
"""JupyterDisplayHTMLTool.py

Defines JupyterDisplayHTMLTool, a Swarmauri tool that renders HTML content
inside a Jupyter Notebook cell. It inherits from ToolBase, supports dynamic
HTML updates (call it again with new markup), integrates with other
visualization tools, and reports the outcome of every display attempt through
a status dictionary instead of raising.
"""

# NOTE: the module docstring above was previously placed *after* the imports,
# which made it a dead string expression rather than the module's __doc__.

import logging
from typing import Dict, List, Literal

from IPython.display import HTML, display
from pydantic import Field

from swarmauri_standard.tools.Parameter import Parameter
from swarmauri_base.tools.ToolBase import ToolBase
from swarmauri_core.ComponentBase import ComponentBase

logger = logging.getLogger(__name__)


@ComponentBase.register_type(ToolBase, "JupyterDisplayHTMLTool")
class JupyterDisplayHTMLTool(ToolBase):
    """
    Renders HTML within a Jupyter Notebook cell.

    Supports dynamic updates by allowing multiple calls with new HTML content,
    integrates easily with other visualization tools, and logs each display
    action and any errors encountered.

    Attributes:
        version (str): The version of the JupyterDisplayHTMLTool.
        parameters (List[Parameter]): Parameters required to render HTML content.
        name (str): The name of the tool.
        description (str): A brief description of the tool's functionality.
        type (Literal["JupyterDisplayHTMLTool"]): The type identifier for the tool.
    """

    version: str = "1.0.0"
    parameters: List[Parameter] = Field(
        default_factory=lambda: [
            Parameter(
                name="html_content",
                type="string",
                description="The HTML content to display within the Jupyter Notebook cell.",
                required=True,
            ),
        ]
    )
    name: str = "JupyterDisplayHTMLTool"
    description: str = "Renders HTML content within a Jupyter environment."
    type: Literal["JupyterDisplayHTMLTool"] = "JupyterDisplayHTMLTool"

    def __call__(self, html_content: str) -> Dict[str, str]:
        """
        Render the provided HTML content in a Jupyter Notebook cell and return
        a status message indicating whether the operation succeeded.

        Args:
            html_content (str): The HTML content to be rendered. Malformed HTML
                is passed through to IPython unchanged; any error raised while
                displaying is caught and reported in the return value.

        Returns:
            Dict[str, str]: A dictionary containing 'status' and 'message' keys.
                On success, 'status' is 'success' and 'message' confirms the
                rendered HTML. On error, 'status' is 'error' and 'message'
                contains the error description.

        Example:
            >>> tool = JupyterDisplayHTMLTool()
            >>> result = tool("<div>Hello, world!</div>")
            >>> print(result)
            {'status': 'success', 'message': 'HTML displayed successfully.'}
        """
        try:
            logger.info("Attempting to display HTML content in Jupyter...")
            # Display the HTML content in the notebook cell.
            display(HTML(html_content))

            logger.info("HTML content displayed successfully.")
            return {"status": "success", "message": "HTML displayed successfully."}
        except Exception as e:
            # Surface the failure to the caller instead of raising, so that
            # agent pipelines can branch on the returned status.
            error_msg = f"An error occurred while displaying HTML: {str(e)}"
            logger.error(error_msg)
            return {"status": "error", "message": error_msg}
"""test_JupyterDisplayHTMLTool.py

Pytest-based test cases for JupyterDisplayHTMLTool, covering class hierarchy,
default attribute values, and the handling of HTML display operations
(success and simulated-failure paths) within a Jupyter notebook environment.
"""

import pytest
from unittest.mock import patch

from swarmauri_tool_jupyterdisplayhtml.JupyterDisplayHTMLTool import (
    JupyterDisplayHTMLTool,
)
from swarmauri_base.tools.ToolBase import ToolBase


@pytest.fixture
def tool() -> JupyterDisplayHTMLTool:
    """Provide a fresh JupyterDisplayHTMLTool instance for each test."""
    return JupyterDisplayHTMLTool()


def test_tool_inheritance(tool: JupyterDisplayHTMLTool) -> None:
    """JupyterDisplayHTMLTool must inherit from ToolBase."""
    assert isinstance(tool, ToolBase), "Tool should inherit from ToolBase."


def test_tool_attributes(tool: JupyterDisplayHTMLTool) -> None:
    """The default attributes must match the expected values."""
    assert tool.name == "JupyterDisplayHTMLTool", "Tool name is incorrect."
    assert tool.description == "Renders HTML content within a Jupyter environment.", (
        "Tool description is incorrect."
    )
    assert tool.version == "1.0.0", "Tool version is incorrect."
    assert tool.type == "JupyterDisplayHTMLTool", "Tool type is incorrect."
    assert len(tool.parameters) == 1, "Unexpected number of parameters."
    assert tool.parameters[0].name == "html_content", (
        "First parameter should be 'html_content'."
    )


def test_tool_call_success(tool: JupyterDisplayHTMLTool) -> None:
    """Calling the tool with valid HTML succeeds with the expected payload."""
    # NOTE(review): the HTML literal was lost in a paste; reconstructed as a
    # simple <div> wrapper around the surviving text — confirm against history.
    test_html = "<div>Hello, World!</div>"
    result = tool(test_html)
    assert result["status"] == "success", "Expected success status."
    assert "HTML displayed successfully." in result["message"], (
        "Expected success message."
    )


@patch("swarmauri_tool_jupyterdisplayhtml.JupyterDisplayHTMLTool.display")
def test_tool_call_error(mock_display, tool: JupyterDisplayHTMLTool) -> None:
    """A failing display() must yield an error status and message.

    The 'display' function is patched to raise an exception, simulating a
    failure scenario inside the tool's try/except.
    """
    mock_display.side_effect = Exception("Simulated display error")
    test_html = "<div>This will fail</div>"
    result = tool(test_html)
    assert result["status"] == "error", "Expected error status."
    assert (
        "An error occurred while displaying HTML: Simulated display error"
        in result["message"]
    ), "Expected error message to contain the simulated exception."
" + result = tool(test_html) + assert result["status"] == "error", "Expected error status." + assert ( + "An error occurred while displaying HTML: Simulated display error" + in result["message"] + ), "Expected error message to contain the simulated exception." diff --git a/pkgs/community/swarmauri_tool_jupyterdisplayhtml/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/tests/unit/test___init__.py new file mode 100644 index 000000000..6d9a429de --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterdisplayhtml/tests/unit/test___init__.py @@ -0,0 +1,61 @@ +""" +Module-level docstring: +This module provides pytest-based test cases for verifying that the package +swarmauri_tool_jupyterdisplayhtml initializes correctly and that the members +from __init__.py are exposed as expected. +""" + +import pytest +from swarmauri_tool_jupyterdisplayhtml import ( + __version__, + __all__, + JupyterDisplayHTMLTool, +) + + +class TestInit(object): + """ + Test class for verifying the initialization behavior of the + swarmauri_tool_jupyterdisplayhtml package. + """ + + def test_jupyterdisplayhtmltool_in_all(self) -> None: + """ + Ensures 'JupyterDisplayHTMLTool' is exposed in the package's __all__ list. + + This test checks if the package initialization correctly includes + JupyterDisplayHTMLTool in the __all__ attribute. + """ + assert "JupyterDisplayHTMLTool" in __all__, ( + "Expected 'JupyterDisplayHTMLTool' to be in __all__, but it was not found." + ) + + def test_version_is_string(self) -> None: + """ + Ensures that __version__ is exposed and is a string. + + This test checks that the __version__ attribute is not None and + that it is of type string. + """ + assert __version__ is not None, ( + "Expected '__version__' to be defined, but it is None." + ) + assert isinstance(__version__, str), ( + f"Expected '__version__' to be a str, got {type(__version__)}." 
+ ) + + def test_jupyterdisplayhtmltool_instantiation(self) -> None: + """ + Verifies that an instance of JupyterDisplayHTMLTool can be created. + + This test checks that the constructor for JupyterDisplayHTMLTool does + not raise an exception, ensuring the tool is validly exposed. + """ + tool = None + try: + tool = JupyterDisplayHTMLTool() + except Exception as exc: + pytest.fail( + f"Instantiating JupyterDisplayHTMLTool raised an exception: {exc}" + ) + assert tool is not None, "Failed to instantiate JupyterDisplayHTMLTool." diff --git a/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/LICENSE b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/README.md b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/README.md new file mode 100644 index 000000000..c9eb1b668 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/README.md @@ -0,0 +1,125 @@ + + +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterexecuteandconvert +

+ +--- + +# swarmauri_tool_jupyterexecuteandconvert + +This package provides functionality to programmatically execute a Jupyter Notebook and convert it to a variety of output formats using nbconvert, enabling automated workflows within the Swarmauri framework. + +--- + +## Installation + +swarmauri_tool_jupyterexecuteandconvert supports Python 3.10 to 3.13. To install from PyPI, use: + +pip install swarmauri_tool_jupyterexecuteandconvert + +Once installed, the JupyterExecuteAndConvertTool becomes available, offering notebook execution and conversion features via the nbconvert CLI. + +--- + +## Usage + +Below is a detailed example of how to utilize the JupyterExecuteAndConvertTool in your environment. The tool exposes a callable class that you can directly instantiate and use in your Python code. + +1. Import the tool into your code: + + from swarmauri_tool_jupyterexecuteandconvert import JupyterExecuteAndConvertTool + +2. Create an instance of the tool: + + notebook_tool = JupyterExecuteAndConvertTool() + +3. Invoke the tool to execute and convert a notebook: + + result = notebook_tool( + notebook_path="path/to/your_notebook.ipynb", + output_format="pdf", # can also be "html" + execution_timeout=600 # optional, defaults to 600 seconds + ) + +4. Process the returned dictionary: + + if "status" in result and result["status"] == "success": + print(f"Successfully converted notebook to: {result['converted_file']}") + else: + print(f"Error: {result.get('error')} - {result.get('message')}") + +The result dictionary can contain: +• "converted_file": A string representing the output file name. +• "status": "success" if execution and conversion succeeded. +• "error" and "message": In the event of any errors during execution or conversion. 
+ +Here is a short illustration: + +--------------------------------------------------------------------------------------- +from swarmauri_tool_jupyterexecuteandconvert import JupyterExecuteAndConvertTool + +# Create the tool instance +tool = JupyterExecuteAndConvertTool() + +# Execute and convert a Jupyter notebook to PDF with a 5-minute timeout +response = tool( + notebook_path="analysis.ipynb", + output_format="pdf", + execution_timeout=300 +) + +if response.get("status") == "success": + print(f"Notebook converted: {response['converted_file']}") +else: + print(f"Error type: {response.get('error')}") + print(f"Error message: {response.get('message')}") +--------------------------------------------------------------------------------------- + +--- + +## Dependencies + +• nbconvert: Used for executing and converting Jupyter notebooks to the desired output format. +• swarmauri_core, swarmauri_base: Required dependencies from the Swarmauri framework, providing essential base classes and utilities. +• Python 3.10 or above. + +The tool automatically integrates into the Swarmauri ecosystem by inheriting from ToolBase and registering itself with ComponentBase. + +--- + +### About JupyterExecuteAndConvertTool + +The JupyterExecuteAndConvertTool is defined in JupyterExecuteAndConvertTool.py. It inherits from ToolBase and uses the @ComponentBase.register_type decorator, making it seamlessly integrable as a Swarmauri tool. It logs notebook execution progress and handles any errors or timeouts. Once the notebook is executed, nbconvert is used again to convert the resultant executed notebook to the specified format (HTML or PDF). + +Key attributes within the tool: +• version: A string indicating the current version of the tool. +• parameters: A list of Parameter objects describing inputs such as notebook_path, output_format, and execution_timeout. 
+• __call__: A method accepting notebook_path, output_format, and execution_timeout, returning a dictionary with information about the process result or any encountered errors. + +--- + +## Contributing + +Thank you for your interest in swarmauri_tool_jupyterexecuteandconvert. Pull requests and bug reports are welcome. Please see our issue tracker for existing requests and open issues. + +--- + +© 2023 Swarmauri – Licensed under the Apache License, Version 2.0. +Happy notebook converting! \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/payload.json b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/payload.json new file mode 100644 index 000000000..116fee7aa --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that programmatically executes and converts a Jupyter Notebook using nbconvert's CLI functionality, enabling automated notebook execution and format conversion.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterexecuteandconvert", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterExecuteAndConvertTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Execute a notebook via CLI using nbconvert --execute.", + "Convert the executed notebook to a specified format (e.g., HTML or PDF).", + "Log execution and conversion processes.", + "Handle timeouts and execution errors gracefully.", + "Return the output file path and conversion status.", + "Integrate with automated reporting workflows." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbconvert", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/pyproject.toml new file mode 100644 index 000000000..fbf8ef6c6 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterexecuteandconvert" +version = "0.6.1" +description = "A tool that programmatically executes and converts a Jupyter Notebook using nbconvert's CLI functionality, enabling automated notebook execution and format conversion." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbconvert = "^7.16.6" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = 
"poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterexecuteandconverttool = "swarmauri_tool_jupyterexecuteandconvert:JupyterExecuteAndConvertTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/swarmauri_tool_jupyterexecuteandconvert/JupyterExecuteAndConvertTool.py b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/swarmauri_tool_jupyterexecuteandconvert/JupyterExecuteAndConvertTool.py new file mode 100644 index 000000000..8c0bf43f4 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/swarmauri_tool_jupyterexecuteandconvert/JupyterExecuteAndConvertTool.py @@ -0,0 +1,189 @@ +""" +JupyterExecuteAndConvertTool.py + +This module defines the JupyterExecuteAndConvertTool, a component that executes a Jupyter +notebook file and converts it to a specified format (e.g., HTML or PDF) using nbconvert. +It leverages the ToolBase and ComponentBase classes from the swarmauri framework to integrate +seamlessly with the system's tool architecture. + +The JupyterExecuteAndConvertTool can handle timeouts, log the execution process, and return +information about the converted file. Errors are handled gracefully and surfaced back to +the caller as needed. 
+""" + +import os +import logging +import subprocess +from typing import List, Dict, Any, Literal +from pydantic import Field + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterExecuteAndConvertTool") +class JupyterExecuteAndConvertTool(ToolBase): + """ + JupyterExecuteAndConvertTool is a tool that executes a Jupyter notebook file via the + nbconvert CLI and then converts the executed notebook to a specified output format. + It handles timeouts, logs the process, and provides a return value that contains + the path to the converted file and the status of the operation. + + Attributes: + version (str): The version of the JupyterExecuteAndConvertTool. + parameters (List[Parameter]): A list of parameters required to perform the + notebook execution and conversion. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterExecuteAndConvertTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_path", + type="string", + description="Path of the Jupyter notebook file to execute.", + required=True, + ), + Parameter( + name="output_format", + type="string", + description="The format to which the executed notebook should be converted (e.g., 'html', 'pdf').", + required=True, + enum=["html", "pdf"], + ), + Parameter( + name="execution_timeout", + type="number", + description="Timeout (in seconds) for notebook execution.", + required=False, + ), + ] + ) + name: str = "JupyterExecuteAndConvertTool" + description: str = ( + "Executes a Jupyter notebook and converts it to a specified format." 
+ ) + type: Literal["JupyterExecuteAndConvertTool"] = "JupyterExecuteAndConvertTool" + + def __call__( + self, + notebook_path: str, + output_format: str = "html", + execution_timeout: int = 600, + ) -> Dict[str, Any]: + """ + Executes the specified Jupyter notebook file and converts it to the chosen output format. + + Args: + notebook_path (str): The path to the Jupyter notebook to execute. + output_format (str): The format for the output conversion. Defaults to "html". + execution_timeout (int): The maximum time (in seconds) allowed for execution. + Defaults to 600 (10 minutes). + + Returns: + Dict[str, Any]: A dictionary containing the conversion status and path to the + output file. In case of an error, the dictionary keys "error" and "message" + will be set to describe the problem. + + Example: + >>> tool = JupyterExecuteAndConvertTool() + >>> result = tool( + ... notebook_path="example_notebook.ipynb", + ... output_format="pdf", + ... execution_timeout=300 + ... ) + >>> print(result) + { + "converted_file": "example_notebook.pdf", + "status": "success" + } + """ + try: + logger.info("Starting Jupyter notebook execution process.") + if not os.path.exists(notebook_path): + logger.error(f"Notebook not found: {notebook_path}") + return { + "error": "Notebook file does not exist.", + "message": notebook_path, + } + + # Derive base name and set output notebook name + base_name = os.path.splitext(os.path.basename(notebook_path))[0] + executed_notebook = f"executed_{base_name}.ipynb" + + # Execute the notebook via CLI with nbconvert + execute_cmd = [ + "jupyter", + "nbconvert", + "--to", + "notebook", + "--execute", + notebook_path, + "--output", + executed_notebook, + ] + + logger.info(f"Executing notebook via command: {' '.join(execute_cmd)}") + subprocess.run(execute_cmd, check=True, timeout=execution_timeout) + logger.info(f"Notebook execution completed: {executed_notebook}") + + except subprocess.TimeoutExpired: + logger.error( + f"Notebook execution timed out 
after {execution_timeout} seconds." + ) + return { + "error": "TimeoutExpired", + "message": f"Notebook execution timed out after {execution_timeout} seconds.", + } + except subprocess.CalledProcessError as cpe: + logger.error(f"Error occurred during notebook execution: {str(cpe)}") + return { + "error": "ExecutionError", + "message": f"An error occurred while executing the notebook: {str(cpe)}", + } + except Exception as e: + logger.error(f"Unexpected error during execution: {str(e)}") + return { + "error": "UnexpectedError", + "message": f"An unexpected error occurred: {str(e)}", + } + + try: + logger.info("Starting notebook conversion process.") + # Determine the conversion command + convert_cmd = [ + "jupyter", + "nbconvert", + "--to", + output_format, + executed_notebook, + ] + logger.info(f"Converting notebook via command: {' '.join(convert_cmd)}") + subprocess.run(convert_cmd, check=True) + + # Determine the name of the converted file + converted_file = f"{os.path.splitext(executed_notebook)[0]}.{output_format}" + logger.info( + f"Notebook successfully converted to {output_format}. 
File: {converted_file}" + ) + + return {"converted_file": converted_file, "status": "success"} + + except subprocess.CalledProcessError as cpe: + logger.error(f"Error occurred during notebook conversion: {str(cpe)}") + return { + "error": "ConversionError", + "message": f"An error occurred while converting the notebook: {str(cpe)}", + } + except Exception as e: + logger.error(f"Unexpected error during conversion: {str(e)}") + return { + "error": "UnexpectedError", + "message": f"An unexpected error occurred: {str(e)}", + } diff --git a/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/swarmauri_tool_jupyterexecuteandconvert/__init__.py b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/swarmauri_tool_jupyterexecuteandconvert/__init__.py new file mode 100644 index 000000000..edd7cbadb --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/swarmauri_tool_jupyterexecuteandconvert/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterexecuteandconvert.JupyterExecuteAndConvertTool import ( + JupyterExecuteAndConvertTool, +) + + +__all__ = ["JupyterExecuteAndConvertTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexecuteandconvert") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/tests/unit/test_JupyterExecuteAndConvertTool.py b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/tests/unit/test_JupyterExecuteAndConvertTool.py new file mode 100644 index 000000000..bb326bae3 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecuteandconvert/tests/unit/test_JupyterExecuteAndConvertTool.py @@ -0,0 +1,155 @@ +import os +import pytest +import 
import subprocess
from unittest.mock import MagicMock, patch

import pytest

from swarmauri_tool_jupyterexecuteandconvert.JupyterExecuteAndConvertTool import (
    JupyterExecuteAndConvertTool,
)


@pytest.fixture
def tool_instance() -> JupyterExecuteAndConvertTool:
    """
    Pytest fixture to create an instance of JupyterExecuteAndConvertTool.
    """
    return JupyterExecuteAndConvertTool()


def _write_notebook(tmp_path, name: str) -> str:
    """Create a placeholder notebook file under tmp_path and return its path.

    Using tmp_path keeps the test isolated and guarantees cleanup even when an
    assertion fails (the previous version wrote into the CWD and leaked files
    on failure because os.remove was only reached after a passing call).
    """
    notebook = tmp_path / name
    notebook.write_text("# Test notebook content", encoding="utf-8")
    return str(notebook)


def test_tool_attributes(tool_instance: JupyterExecuteAndConvertTool) -> None:
    """
    Test that the JupyterExecuteAndConvertTool instance has the expected attributes.
    """
    assert tool_instance.name == "JupyterExecuteAndConvertTool"
    assert (
        tool_instance.description
        == "Executes a Jupyter notebook and converts it to a specified format."
    )
    assert tool_instance.type == "JupyterExecuteAndConvertTool"
    assert len(tool_instance.parameters) == 3


def test_notebook_not_found(tool_instance: JupyterExecuteAndConvertTool) -> None:
    """
    Test that providing a non-existent notebook path returns an error indicating
    the file does not exist.
    """
    result = tool_instance(
        notebook_path="non_existent_notebook.ipynb",
        output_format="html",
        execution_timeout=10,
    )
    assert "error" in result
    assert result["error"] == "Notebook file does not exist."


@patch("subprocess.run")
def test_successful_execution_and_conversion(
    mock_subprocess: MagicMock,
    tool_instance: JupyterExecuteAndConvertTool,
    tmp_path,
) -> None:
    """
    Test successful execution and conversion of a notebook by mocking subprocess.run.
    Ensures the tool returns status 'success' and the expected converted file name.
    """
    # Simulate successful execution and conversion.
    mock_subprocess.return_value = None

    notebook_path = _write_notebook(tmp_path, "test_notebook.ipynb")
    result = tool_instance(
        notebook_path=notebook_path, output_format="html", execution_timeout=10
    )

    assert "status" in result
    assert result["status"] == "success"
    assert "converted_file" in result
    assert result["converted_file"].endswith(".html")


@patch("subprocess.run")
def test_execution_timeout(
    mock_subprocess: MagicMock,
    tool_instance: JupyterExecuteAndConvertTool,
    tmp_path,
) -> None:
    """
    Test that a TimeoutExpired exception is handled properly and reported via
    the returned error dictionary.
    """
    mock_subprocess.side_effect = subprocess.TimeoutExpired(
        cmd="jupyter nbconvert", timeout=5
    )

    notebook_path = _write_notebook(tmp_path, "timeout_notebook.ipynb")
    result = tool_instance(
        notebook_path=notebook_path, output_format="html", execution_timeout=1
    )

    assert "error" in result
    assert result["error"] == "TimeoutExpired"


@patch("subprocess.run")
def test_execution_error(
    mock_subprocess: MagicMock,
    tool_instance: JupyterExecuteAndConvertTool,
    tmp_path,
) -> None:
    """
    Test that a CalledProcessError during notebook execution is handled properly
    and reported via the returned error dictionary.
    """
    mock_subprocess.side_effect = subprocess.CalledProcessError(
        returncode=1, cmd="jupyter nbconvert"
    )

    notebook_path = _write_notebook(tmp_path, "fail_notebook.ipynb")
    result = tool_instance(
        notebook_path=notebook_path, output_format="html", execution_timeout=10
    )

    assert "error" in result
    assert result["error"] == "ExecutionError"


@patch("subprocess.run")
def test_conversion_error(
    mock_subprocess: MagicMock,
    tool_instance: JupyterExecuteAndConvertTool,
    tmp_path,
) -> None:
    """
    Test that a CalledProcessError during notebook conversion is handled properly.
    The first subprocess call (execution) succeeds; the second (conversion) fails.
    """
    mock_subprocess.side_effect = [
        None,  # Successful execution
        subprocess.CalledProcessError(
            returncode=2, cmd="jupyter nbconvert"
        ),  # Conversion fails
    ]

    notebook_path = _write_notebook(tmp_path, "conversion_fail_notebook.ipynb")
    result = tool_instance(
        notebook_path=notebook_path, output_format="html", execution_timeout=10
    )

    assert "error" in result
    assert result["error"] == "ConversionError"
+Specifically, it ensures that JupyterExecuteAndConvertTool is correctly exposed +and that the package version attribute is accessible. +""" + +from swarmauri_tool_jupyterexecuteandconvert import ( + JupyterExecuteAndConvertTool, + __version__, +) + + +def test_jupyter_execute_and_convert_tool_exposed() -> None: + """ + Test that the JupyterExecuteAndConvertTool is exposed via the package's __init__. + """ + # Create an instance of JupyterExecuteAndConvertTool to verify it's imported correctly. + instance = JupyterExecuteAndConvertTool() + assert instance is not None, ( + "Expected a valid instance of JupyterExecuteAndConvertTool." + ) + + +def test_package_version_available() -> None: + """ + Test that the package version is properly exposed as a string. The version may be '0.0.0' + if the package is not installed (e.g., during development), but it should still be defined. + """ + # Check that __version__ is defined and is a string. + assert isinstance(__version__, str), "Expected __version__ to be a string." diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/LICENSE b/pkgs/community/swarmauri_tool_jupyterexecutecell/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/README.md b/pkgs/community/swarmauri_tool_jupyterexecutecell/README.md new file mode 100644 index 000000000..60d362982 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/README.md @@ -0,0 +1,118 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterexecutecell +

+ +--- + +# swarmauri_tool_jupyterexecutecell + +The "swarmauri_tool_jupyterexecutecell" package provides a tool that allows you to execute code cells in an active Jupyter kernel, capturing all standard output, errors, and any exceptions that may occur. This makes it useful for programmatically running snippets of Python code within Jupyter environments, such as notebooks or other interactive contexts. + +This package comes with fully functional, well-documented Python modules, following PEP 8 style guidelines and featuring type hints throughout. Each function, method, and class includes explanatory docstrings, helping users to quickly get started and integrate this tool into their own workflows. + +--- + +## Installation + +To install the package from PyPI with all its dependencies, run: + +• Using pip: + pip install swarmauri_tool_jupyterexecutecell + +• Supported Python versions: + - Python 3.10 + - Python 3.11 + - Python 3.12 + - Python 3.13 + +Make sure that Jupyter-related tools (e.g., IPython) are installed for the cell execution functionality to work as expected. If your environment does not already include Jupyter or IPython, you can install them alongside this package (for example, pip install jupyter ipython). + +--- + +## Usage + +After installation, you can import and use the JupyterExecuteCellTool to execute small code snippets within a running Jupyter session: + +from swarmauri_tool_jupyterexecutecell import JupyterExecuteCellTool + +# Instantiate the tool +tool = JupyterExecuteCellTool() + +# Provide some code to execute +code_to_run = "print('Hello from swarmauri!')" + +# Execute the code in the Jupyter kernel +result = tool(code_to_run) + +# The 'result' dictionary contains three keys: 'stdout', 'stderr', and 'error'. 
+print("Captured standard output:") +print(result["stdout"]) + +print("Captured standard error (if any):") +print(result["stderr"]) + +print("Captured error messages (if any):") +print(result["error"]) + +If the execution times out (default is 30 seconds), the returned dictionary’s "error" key will contain a timeout message. You can override the default timeout by passing a second argument: + +result = tool(code_to_run, timeout=60) # 60-second timeout + +## Examples + +1. Executing Basic Python Statements: + + code_to_run = "a = 10\nb = 20\nprint(a + b)" + result = tool(code_to_run) + # result["stdout"] will contain '30' + # result["stderr"] and result["error"] should be empty if everything worked correctly. + +2. Handling Exceptions: + + code_with_error = "print(1/0)" # Division by zero + result = tool(code_with_error) + # result["stdout"] should be empty + # result["stderr"] or result["error"] will contain information about the ZeroDivisionError. + +3. Complex Operations Requiring More Time: + + code_with_long_process = ''' +import time +time.sleep(10) +print("Long operation finished!") +''' + result = tool(code_with_long_process, timeout=15) + # Will complete successfully if it finishes under 15 seconds. + # If it exceeds the specified timeout, the "error" key will note the timeout event. + +--- + +## Dependencies + +• swarmauri_core for core support. +• swarmauri_base for base tool classes. +• jupyter_client (and typically IPython) for Jupyter interaction. + +Consult the pyproject.toml for additional dev/test dependencies. + +--- + +## Additional Notes + +• The package is designed to work seamlessly in Jupyter-based environments but also includes robust error handling and logging. +• All user-facing methods and classes are fully implemented with docstrings and type hints, ensuring clarity and strong typing. 
+• The JupyterExecuteCellTool inherits from the ToolBase class and is registered via the ComponentBase for easy integration into the broader Swarmauri ecosystem. + +We hope you find this tool helpful in automating or simplifying code execution within Jupyter kernels. Enjoy effortless cell execution and output management with swarmauri_tool_jupyterexecutecell! \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/payload.json b/pkgs/community/swarmauri_tool_jupyterexecutecell/payload.json new file mode 100644 index 000000000..3b2dd5389 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to execute a single code cell in a running Jupyter kernel using jupyter_client, capturing its output and errors.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterexecutecell", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterExecuteCellTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Send a code cell to the running kernel for execution.", + "Capture and log the output and errors of the cell.", + "Support synchronous execution with timeout settings.", + "Return the cell execution result for further processing.", + "Handle execution failures gracefully.", + "Integrate with notebook execution workflows." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "jupyter_client", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterexecutecell/pyproject.toml new file mode 100644 index 000000000..b224a3a57 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/pyproject.toml @@ -0,0 +1,57 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterexecutecell" +version = "0.6.1" +description = "A tool designed to execute a single code cell in a running Jupyter kernel using jupyter_client, capturing its output and errors." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterexecutecell/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +jupyter_client = "^8.6.3" +IPython = "^8.32.0" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] 
+markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterexecutecelltool = "swarmauri_tool_jupyterexecutecell:JupyterExecuteCellTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/swarmauri_tool_jupyterexecutecell/JupyterExecuteCellTool.py b/pkgs/community/swarmauri_tool_jupyterexecutecell/swarmauri_tool_jupyterexecutecell/JupyterExecuteCellTool.py new file mode 100644 index 000000000..d2b7f5cb7 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/swarmauri_tool_jupyterexecutecell/JupyterExecuteCellTool.py @@ -0,0 +1,145 @@ +""" +JupyterExecuteCellTool.py + +This module defines the JupyterExecuteCellTool, a component that sends code cells to the +Jupyter kernel for execution, captures their output, and returns the results. It leverages +the ToolBase and ComponentBase classes from the swarmauri framework to integrate seamlessly +with the system's tool architecture. + +The JupyterExecuteCellTool supports synchronous code execution with a configurable timeout +interval. The tool logs and gracefully handles execution failures, returning any errors +captured during execution. 
+""" + +import concurrent.futures +import logging +import io +import traceback +from contextlib import redirect_stdout, redirect_stderr +from typing import Dict, List, Literal, Optional + +from IPython import get_ipython +from pydantic import Field + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterExecuteCellTool") +class JupyterExecuteCellTool(ToolBase): + """ + JupyterExecuteCellTool is a tool that sends code to a Jupyter kernel for execution, + capturing stdout, stderr, and any exceptions encountered. It supports a configurable + timeout to prevent long-running code from blocking execution indefinitely. + + Attributes: + version (str): The version of the JupyterExecuteCellTool. + parameters (List[Parameter]): A list of parameters required to execute a notebook cell. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterExecuteCellTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="code", + type="string", + description="The code to be executed in the Jupyter kernel.", + required=True, + ), + Parameter( + name="timeout", + type="number", + description="Timeout in seconds for the cell execution.", + required=False, + default=30, + ), + ] + ) + name: str = "JupyterExecuteCellTool" + description: str = "Executes code cells within a Jupyter kernel environment." + type: Literal["JupyterExecuteCellTool"] = "JupyterExecuteCellTool" + + def __call__(self, code: str, timeout: Optional[int] = 30) -> Dict[str, str]: + """ + Executes the provided code cell in a Jupyter kernel with a specified timeout. + + Args: + code (str): The code cell content to execute. 
+ timeout (Optional[int]): The maximum number of seconds to allow for code execution. + Defaults to 30 seconds. + + Returns: + Dict[str, str]: A dictionary containing the execution results. Keys include: + - 'stdout': The standard output captured from the execution. + - 'stderr': The error output captured from the execution, if any. + - 'error': Any exception or error message if the execution fails or times out. + + Example: + >>> executor = JupyterExecuteCellTool() + >>> result = executor("print('Hello, world!')") + >>> print(result['stdout']) # Should contain "Hello, world!" + """ + + def _run_code(cell_code: str) -> Dict[str, str]: + """ + Internal helper function to run the provided code in the current IPython kernel, + capturing stdout and stderr. + """ + ip = get_ipython() + if not ip: + logger.error("No active IPython kernel found.") + return { + "stdout": "", + "stderr": "No active IPython kernel found.", + "error": "KernelNotFoundError", + } + + stdout_buffer = io.StringIO() + stderr_buffer = io.StringIO() + + try: + with redirect_stdout(stdout_buffer), redirect_stderr(stderr_buffer): + # Execute the cell in IPython with store_history=True so that it behaves + # like a normal code cell in a notebook environment. + ip.run_cell(cell_code, store_history=True) + except Exception as exc: + logger.error("An exception occurred while executing the cell: %s", exc) + return { + "stdout": stdout_buffer.getvalue(), + "stderr": stderr_buffer.getvalue(), + "error": str(traceback.format_exc()), + } + + return { + "stdout": stdout_buffer.getvalue(), + "stderr": stderr_buffer.getvalue(), + "error": "", + } + + # Use a ThreadPoolExecutor to support timeouts during synchronous execution. 
+ with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(_run_code, code) + try: + result = future.result(timeout=timeout) + logger.info("Cell executed successfully.") + return result + except concurrent.futures.TimeoutError: + logger.error("Cell execution exceeded timeout of %s seconds.", timeout) + return { + "stdout": "", + "stderr": "", + "error": f"Execution timed out after {timeout} seconds.", + } + except Exception as exc: + logger.error("Unexpected error during cell execution: %s", exc) + return { + "stdout": "", + "stderr": "", + "error": f"An unexpected error occurred: {str(exc)}", + } diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/swarmauri_tool_jupyterexecutecell/__init__.py b/pkgs/community/swarmauri_tool_jupyterexecutecell/swarmauri_tool_jupyterexecutecell/__init__.py new file mode 100644 index 000000000..5df0201d2 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/swarmauri_tool_jupyterexecutecell/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterexecutecell.JupyterExecuteCellTool import ( + JupyterExecuteCellTool, +) + + +__all__ = ["JupyterExecuteCellTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexecutecell") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/tests/unit/test_JupyterExecuteCellTool.py b/pkgs/community/swarmauri_tool_jupyterexecutecell/tests/unit/test_JupyterExecuteCellTool.py new file mode 100644 index 000000000..94f7f8d20 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/tests/unit/test_JupyterExecuteCellTool.py @@ -0,0 +1,114 @@ +from unittest.mock import 
patch, MagicMock +from swarmauri_tool_jupyterexecutecell.JupyterExecuteCellTool import ( + JupyterExecuteCellTool, +) + + +def test_tool_initialization(): + """ + Test the initialization of JupyterExecuteCellTool, verifying its default attributes. + """ + tool = JupyterExecuteCellTool() + assert tool.name == "JupyterExecuteCellTool", "Tool name should match." + assert tool.version == "1.0.0", "Tool version should be '1.0.0'." + assert ( + tool.description == "Executes code cells within a Jupyter kernel environment." + ) + assert tool.type == "JupyterExecuteCellTool", ( + "Tool type should be 'JupyterExecuteCellTool'." + ) + assert len(tool.parameters) == 2, ( + "There should be two default parameters: code, timeout." + ) + + +def test_tool_parameters(): + """ + Test that the tool's parameter list includes the expected attributes. + """ + tool = JupyterExecuteCellTool() + param_names = [param.name for param in tool.parameters] + assert "code" in param_names, "Parameters must include 'code'." + assert "timeout" in param_names, "Parameters must include 'timeout'." + + +def test_tool_call_basic_execution(): + """ + Test that the tool can execute a simple print statement and capture its output. + """ + tool = JupyterExecuteCellTool() + result = tool("print('Hello, world!')") + assert "Hello, world!" in result["stdout"], ( + "Expected code execution output not found in stdout." + ) + assert result["stderr"] == "", "stderr should be empty when executing valid code." + assert result["error"] == "", "error should be empty when executing valid code." + + +def test_tool_call_syntax_error(): + """ + Test that the tool captures Python syntax errors appropriately. + """ + tool = JupyterExecuteCellTool() + result = tool("print('Missing parenthesis'") + assert "SyntaxError" in result["error"], ( + "Expected a SyntaxError in the error field." + ) + assert result["stderr"] != "", "stderr should capture syntax error details." 
+ + +def test_tool_call_timeout(): + """ + Test that the tool handles code execution timeouts and returns an appropriate error message. + """ + tool = JupyterExecuteCellTool() + # This code sleeps for 3 seconds, but we enforce a 1-second timeout to trigger a timeout error. + result = tool("import time; time.sleep(3)", timeout=1) + assert "Execution timed out after 1 seconds." in result["error"], ( + "Expected timeout error message." + ) + assert result["stdout"] == "", "stdout should be empty on timeout." + assert result["stderr"] == "", "stderr should be empty on timeout." + + +@patch( + "swarmauri_tool_jupyterexecutecell.JupyterExecuteCellTool.get_ipython", + return_value=None, +) +def test_tool_call_no_active_kernel(mock_ipython): + """ + Test that the tool reports an error when there is no active IPython kernel. + """ + tool = JupyterExecuteCellTool() + result = tool("print('Hello')", timeout=1) + assert result["stderr"] == "No active IPython kernel found.", ( + "Expected stderr to mention no active kernel." + ) + assert result["error"] == "KernelNotFoundError", ( + "Expected error to be 'KernelNotFoundError'." + ) + assert result["stdout"] == "", "stdout should be empty when no kernel is found." + + +@patch("swarmauri_tool_jupyterexecutecell.JupyterExecuteCellTool.get_ipython") +def test_tool_call_exception_during_execution(mock_ipython): + """ + Test that the tool captures and logs exceptions raised during code execution. + """ + # Mock a scenario where run_cell raises an exception. + mock_shell = MagicMock() + mock_shell.run_cell.side_effect = RuntimeError("Mocked runtime error") + mock_ipython.return_value = mock_shell + + tool = JupyterExecuteCellTool() + result = tool("print('Testing exception')") + assert "Mocked runtime error" in result["error"], ( + "Expected mocked runtime error in the error field." + ) + assert "RuntimeError" in result["error"], ( + "Expected 'RuntimeError' text in error field." 
+ ) + assert result["stderr"] != "", "stderr should capture exception details." + assert "Testing exception" not in result["stdout"], ( + "stdout should not have content from failing command." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterexecutecell/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterexecutecell/tests/unit/test___init__.py new file mode 100644 index 000000000..5a4f8902f --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutecell/tests/unit/test___init__.py @@ -0,0 +1,83 @@ +""" +Unit tests for verifying correct exposure of the JupyterExecuteCellTool class +and the package version from the swarmauri_tool_jupyterexecutecell package's +__init__.py file. +""" + +from typing import Any + +# Import directly from the package's __init__.py +from swarmauri_tool_jupyterexecutecell import ( + JupyterExecuteCellTool, + __version__, + __all__ as exposed_items, +) + + +def test_jupyter_execute_cell_tool_exposed() -> None: + """ + Test if the JupyterExecuteCellTool is exposed correctly by the package's __init__.py. + """ + # Ensure the imported class is not None. + assert JupyterExecuteCellTool is not None, ( + "JupyterExecuteCellTool should not be None." + ) + + +def test_jupyter_execute_cell_tool_in_all() -> None: + """ + Test that JupyterExecuteCellTool is included in the package's __all__ attribute. + """ + assert "JupyterExecuteCellTool" in exposed_items, ( + "JupyterExecuteCellTool should be listed in __all__." + ) + + +def test_version_is_string() -> None: + """ + Test that the package's __version__ attribute is a string. + """ + assert isinstance(__version__, str), "__version__ should be a string." + + +def test_jupyter_execute_cell_tool_inherit_base_class() -> None: + """ + Test that JupyterExecuteCellTool inherits from its expected base class. + + Note: + Replace 'ExpectedBaseClass' with the actual base class name if different. + """ + + # For demonstration only; adjust to match actual base class usage. 
+ class ExpectedBaseClass: + """ + A placeholder base class to demonstrate inheritance checking. + Replace this with the actual base class used by JupyterExecuteCellTool. + """ + + # Check whether JupyterExecuteCellTool is a subclass of the placeholder base class. + # Adjust as appropriate to reflect the real inheritance hierarchy. + assert issubclass(JupyterExecuteCellTool, ExpectedBaseClass), ( + "JupyterExecuteCellTool should inherit from the expected base class." + ) + + +def test_jupyter_execute_cell_tool_methods() -> None: + """ + Test that JupyterExecuteCellTool methods exist and function as expected. + + Adjust the method checks to align with the actual methods + required for JupyterExecuteCellTool. + """ + tool_instance: Any = JupyterExecuteCellTool() + + # Example: Check if an execute method exists. + # Adjust to match the actual method name and usage. + assert hasattr(tool_instance, "execute_cell"), ( + "JupyterExecuteCellTool should define 'execute_cell' method." + ) + + # Example call. Replace with realistic test logic. + # The test is purely demonstrative; actual tests should verify real logic. + execute_result = tool_instance.execute_cell("print('Test')") + assert execute_result is not None, "execute_cell method should return a result." diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/LICENSE b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/README.md b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/README.md new file mode 100644 index 000000000..d7fc9716d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/README.md @@ -0,0 +1,92 @@ + + +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterexecutenotebook +

+ +--- + +# swarmauri_tool_jupyterexecutenotebook + +The swarmauri_tool_jupyterexecutenotebook package provides a tool for executing all cells in a Jupyter notebook in sequence, capturing outputs and returning the fully updated NotebookNode object. It leverages the Swarmauri framework's base and core components. + +## Installation + +To install swarmauri_tool_jupyterexecutenotebook, make sure you have Python 3.10 or later: + +1. Using pip: + • (Recommended) Create and activate a virtual environment. + • Run: + pip install swarmauri_tool_jupyterexecutenotebook + +2. Using Poetry in an existing project: + • poetry add swarmauri_tool_jupyterexecutenotebook + +This will automatically install all dependencies required to run the JupyterExecuteNotebookTool. + +## Usage + +The principal component of this package is the JupyterExecuteNotebookTool, which executes a given notebook, capturing cell outputs and errors. Below is a quick reference for using the tool programmatically in your Python code. + +Example usage: + +--------------------------------------------------------------------------------- +from swarmauri_tool_jupyterexecutenotebook import JupyterExecuteNotebookTool + +def execute_my_notebook(): + """ + Demonstrates how to instantiate and use the JupyterExecuteNotebookTool to + execute a Jupyter notebook file. This includes capturing outputs and + error messages. 
+ """ + # Create an instance of the tool + tool = JupyterExecuteNotebookTool() + + # Execute the Jupyter notebook; specify the path to your notebook + executed_notebook = tool( + notebook_path="my_notebook.ipynb", + timeout=60 # Optional: defaults to 30 if not provided + ) + + # The returned `executed_notebook` is a NotebookNode with outputs captured + return executed_notebook + +if __name__ == "__main__": + result_notebook = execute_my_notebook() + # You can further analyze 'result_notebook' outputs here +--------------------------------------------------------------------------------- + +In this example: +• The notebook_path parameter is a required string referencing the target notebook file. +• The optional timeout parameter defines how long each cell can take to run before throwing an error (default is 30 seconds). + +The executed NotebookNode object will contain both new outputs and any error messages generated during execution. + +## Dependencies + +This package relies on: +• swarmauri_core for base components. +• swarmauri_base for the ToolBase class. +• nbconvert, nbformat, and nbclient for handling and executing Jupyter notebooks. + +When you install swarmauri_tool_jupyterexecutenotebook via pip or Poetry, these dependencies are automatically handled for you. Refer to the project's pyproject.toml for the full list of dependencies and version requirements. + +--- + +This README is provided as part of the swarmauri_tool_jupyterexecutenotebook package. If you have any questions or issues, please consult our documentation or open a support request. Thank you for using Swarmauri! 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/payload.json b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/payload.json new file mode 100644 index 000000000..80f013cc4 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/payload.json @@ -0,0 +1,30 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to execute all cells in a Jupyter Notebook using nbconvert\u2019s ExecutePreprocessor, capturing outputs for testing and reporting.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterexecutenotebook", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterExecuteNotebookTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Execute all cells in the notebook sequentially.", + "Capture and log output and errors.", + "Support configurable execution timeouts.", + "Update the NotebookNode with execution outputs.", + "Handle cell execution failures gracefully.", + "Return the executed notebook object." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbconvert", + "version": "*" + }, + { + "name": "nbformat", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/pyproject.toml new file mode 100644 index 000000000..2311097a2 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/pyproject.toml @@ -0,0 +1,57 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterexecutenotebook" +version = "0.6.1" +description = "A tool designed to execute all cells in a Jupyter Notebook using nbconvert’s ExecutePreprocessor, capturing outputs for testing and reporting." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterexecutenotebook/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbconvert = "^7.16.6" +nbformat = "^5.10.4" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterexecutenotebooktool = "swarmauri_tool_jupyterexecutenotebook:JupyterExecuteNotebookTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/swarmauri_tool_jupyterexecutenotebook/JupyterExecuteNotebookTool.py 
b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/swarmauri_tool_jupyterexecutenotebook/JupyterExecuteNotebookTool.py new file mode 100644 index 000000000..372cb0102 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/swarmauri_tool_jupyterexecutenotebook/JupyterExecuteNotebookTool.py @@ -0,0 +1,117 @@ +""" +JupyterExecuteNotebookTool.py + +This module defines the JupyterExecuteNotebookTool, which executes all cells +in a Jupyter notebook sequentially, capturing outputs and errors, and returns +the executed notebook object. It leverages the ToolBase and ComponentBase +classes from the swarmauri framework to integrate seamlessly with the system's +tool architecture. + +The JupyterExecuteNotebookTool supports configurable execution timeouts and +handles cell execution failures gracefully. The executed NotebookNode is +updated with outputs produced during execution. +""" + +import logging +from typing import List, Literal +from pydantic import Field +from nbformat import read, NO_CONVERT +from nbformat.notebooknode import NotebookNode +from nbclient import NotebookClient +from nbclient.exceptions import CellExecutionError + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterExecuteNotebookTool") +class JupyterExecuteNotebookTool(ToolBase): + """ + JupyterExecuteNotebookTool is a tool that executes a Jupyter notebook by running + all cells sequentially. It captures and logs the outputs or errors produced + during the execution and returns the executed notebook object. + + Attributes: + version (str): The version of the JupyterExecuteNotebookTool. + parameters (List[Parameter]): A list of parameters required to execute the notebook. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. 
+ type (Literal["JupyterExecuteNotebookTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_path", + type="string", + description="The path to the Jupyter notebook to be executed.", + required=True, + ), + Parameter( + name="timeout", + type="number", + description="Maximum time (in seconds) for each cell to execute. Defaults to 30.", + required=False, + ), + ] + ) + name: str = "JupyterExecuteNotebookTool" + description: str = "Executes a Jupyter notebook and captures outputs." + type: Literal["JupyterExecuteNotebookTool"] = "JupyterExecuteNotebookTool" + + def __call__(self, notebook_path: str, timeout: int = 30) -> NotebookNode: + """ + Executes the given Jupyter notebook by running all cells sequentially. Captures + all outputs and errors, updating the NotebookNode object with the results. + + Args: + notebook_path (str): The file path to the Jupyter notebook to execute. + timeout (int, optional): The maximum time (in seconds) allowed for each + cell to execute. Defaults to 30. + + Returns: + NotebookNode: The notebook object after execution, containing updated + outputs. If cell execution fails, the error is recorded + in the notebook outputs. + + Example: + >>> tool = JupyterExecuteNotebookTool() + >>> executed_notebook = tool("example_notebook.ipynb", 60) + >>> # The returned NotebookNode now contains the executed cells and outputs. 
+ """ + logger.info("Starting notebook execution with JupyterExecuteNotebookTool.") + logger.debug(f"Notebook path: {notebook_path}") + logger.debug(f"Execution timeout: {timeout} seconds") + + try: + with open(notebook_path, "r", encoding="utf-8") as f: + notebook: NotebookNode = read(f, NO_CONVERT) + + # Create a client to execute the notebook + client = NotebookClient( + notebook, + timeout=timeout, + kernel_name="python3", + allow_errors=True, # Continue execution even if a cell fails + ) + + logger.info("Executing notebook cells...") + client.execute() + logger.info("Notebook execution completed successfully.") + return notebook + + except CellExecutionError as e: + logger.error("A cell execution error occurred.") + logger.exception(e) + # The executed notebook still contains partial output and the error details. + return notebook + + except Exception as e: + logger.error("An unexpected error occurred during notebook execution.") + logger.exception(e) + # Return the partially executed or unmodified notebook in case of failure. 
+ return notebook diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/swarmauri_tool_jupyterexecutenotebook/__init__.py b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/swarmauri_tool_jupyterexecutenotebook/__init__.py new file mode 100644 index 000000000..f90c7228e --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/swarmauri_tool_jupyterexecutenotebook/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterexecutenotebook.JupyterExecuteNotebookTool import ( + JupyterExecuteNotebookTool, +) + + +__all__ = ["JupyterExecuteNotebookTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexecutenotebook") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/tests/unit/test_JupyterExecuteNotebookTool.py b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/tests/unit/test_JupyterExecuteNotebookTool.py new file mode 100644 index 000000000..6c1026af2 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/tests/unit/test_JupyterExecuteNotebookTool.py @@ -0,0 +1,120 @@ +"""Unit tests for JupyterExecuteNotebookTool. + +This module contains pytest-based unit tests that verify the functionality of +the JupyterExecuteNotebookTool class. The tests ensure that the tool correctly +executes Jupyter notebooks, handles errors, and returns the expected results. 
+""" + +from unittest.mock import patch, MagicMock, mock_open +from nbclient.exceptions import CellExecutionError +from nbformat.notebooknode import NotebookNode + +from swarmauri_tool_jupyterexecutenotebook.JupyterExecuteNotebookTool import ( + JupyterExecuteNotebookTool, +) +from swarmauri_base.tools.ToolBase import ToolBase + + +def test_inheritance() -> None: + """ + Test that JupyterExecuteNotebookTool inherits from the base class ToolBase. + """ + assert issubclass(JupyterExecuteNotebookTool, ToolBase), ( + "JupyterExecuteNotebookTool should inherit from ToolBase." + ) + + +def test_default_attributes() -> None: + """ + Test that JupyterExecuteNotebookTool has all expected default attributes. + """ + tool = JupyterExecuteNotebookTool() + assert tool.version == "1.0.0", "Expected default version to be 1.0.0." + assert tool.name == "JupyterExecuteNotebookTool", ( + "Expected default name to be JupyterExecuteNotebookTool." + ) + assert tool.type == "JupyterExecuteNotebookTool", ( + "Expected tool type to be JupyterExecuteNotebookTool." + ) + assert len(tool.parameters) == 2, ( + "Expected two default parameters (notebook_path, timeout)." + ) + + +@patch("builtins.open", new_callable=mock_open, read_data="{}") +@patch("nbformat.read") +@patch("nbclient.NotebookClient") +def test_call_executes_notebook( + mock_notebook_client: MagicMock, mock_nbformat_read: MagicMock, mock_file: MagicMock +) -> None: + """ + Test that calling the tool with valid arguments executes the notebook + and returns the updated NotebookNode. 
+ """ + mock_notebook = MagicMock(spec=NotebookNode) + mock_nbformat_read.return_value = mock_notebook + client_instance = MagicMock() + mock_notebook_client.return_value = client_instance + + tool = JupyterExecuteNotebookTool() + result = tool("fake_notebook.ipynb", timeout=60) + + mock_notebook_client.assert_called_once_with( + mock_notebook, timeout=60, kernel_name="python3", allow_errors=True + ) + client_instance.execute.assert_called_once() + assert result == mock_notebook, ( + "Expected the tool to return the executed NotebookNode." + ) + + +@patch("builtins.open", new_callable=mock_open, read_data="{}") +@patch("nbformat.read") +@patch("nbclient.NotebookClient") +def test_call_cell_execution_error( + mock_notebook_client: MagicMock, mock_nbformat_read: MagicMock, mock_file: MagicMock +) -> None: + """ + Test that a CellExecutionError raised during notebook execution is logged + and the partially executed notebook is returned. + """ + mock_notebook = MagicMock(spec=NotebookNode) + mock_nbformat_read.return_value = mock_notebook + client_instance = MagicMock() + mock_notebook_client.return_value = client_instance + + # Configure the client to raise CellExecutionError + client_instance.execute.side_effect = CellExecutionError("Error in cell execution.") + + tool = JupyterExecuteNotebookTool() + result = tool("fake_notebook.ipynb") + + assert result == mock_notebook, ( + "Even if a CellExecutionError occurs, the tool should return the notebook." + ) + + +@patch("builtins.open", new_callable=mock_open, read_data="{}") +@patch("nbformat.read") +@patch("nbclient.NotebookClient") +def test_call_unexpected_exception( + mock_notebook_client: MagicMock, mock_nbformat_read: MagicMock, mock_file: MagicMock +) -> None: + """ + Test that an unexpected exception during notebook execution is logged + and the notebook (possibly unmodified) is still returned. 
+ """ + mock_notebook = MagicMock(spec=NotebookNode) + mock_nbformat_read.return_value = mock_notebook + client_instance = MagicMock() + mock_notebook_client.return_value = client_instance + + # Configure the client to raise a generic exception + client_instance.execute.side_effect = ValueError("Unexpected error.") + + tool = JupyterExecuteNotebookTool() + result = tool("fake_notebook.ipynb") + + assert result == mock_notebook, ( + "When an unexpected exception occurs, the tool should still return the notebook." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebook/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/tests/unit/test___init__.py new file mode 100644 index 000000000..f57b1fe8b --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebook/tests/unit/test___init__.py @@ -0,0 +1,78 @@ +"""Tests for the swarmauri_tool_jupyterexecutenotebook package initialization. + +This module ensures that the __init__.py correctly exposes the necessary +components, including the JupyterExecuteNotebookTool class and the __version__ +attribute. +""" + + +def test_jupyter_execute_notebook_tool_exposed() -> None: + """ + Test that JupyterExecuteNotebookTool is successfully exposed by the package. + + This verifies that the class is accessible at the package's top level, + ensuring __init__.py correctly re-exports the component. + """ + # Import the tool from the package top level. + # If the import fails, this test will error out. + from swarmauri_tool_jupyterexecutenotebook import JupyterExecuteNotebookTool + + # Check if the imported object is callable (i.e., a class). + assert callable(JupyterExecuteNotebookTool), ( + "JupyterExecuteNotebookTool should be a callable class." + ) + + +def test_package_version_exposed() -> None: + """ + Test that __version__ is successfully exposed by the package. 
+ + Ensures that the version attribute defined in __init__.py is available, + and that it contains a non-empty string value. + """ + from swarmauri_tool_jupyterexecutenotebook import __version__ + + # Check if __version__ is a non-empty string. + assert isinstance(__version__, str), "__version__ should be a string." + assert len(__version__) > 0, "__version__ should not be an empty string." + + +def test_jupyter_execute_notebook_tool_inherits_base_class() -> None: + """ + Test that JupyterExecuteNotebookTool inherits from its expected base class. + + This ensures the new component class meets the requirement of inheriting + from the appropriate base tool class, providing full implementations of + all required methods. + """ + from swarmauri_tool_jupyterexecutenotebook import JupyterExecuteNotebookTool + from swarmauri_tool_jupyterexecutenotebook.JupyterExecuteNotebookTool import ( + BaseTool, + ) + + # Check subclass relationship to the base class. + assert issubclass(JupyterExecuteNotebookTool, BaseTool), ( + "JupyterExecuteNotebookTool must inherit from BaseTool." + ) + + +def test_jupyter_execute_notebook_tool_methods() -> None: + """ + Test that JupyterExecuteNotebookTool implements all required methods. + + This ensures that JupyterExecuteNotebookTool provides fully functional + implementations and there are no missing methods or abstract placeholders. + """ + from swarmauri_tool_jupyterexecutenotebook import JupyterExecuteNotebookTool + + # Create an instance of the tool for testing. + tool_instance = JupyterExecuteNotebookTool() + + # Example of testing for a required method named 'execute_notebook'. + # Replace with actual required methods in the real implementation. + assert hasattr(tool_instance, "execute_notebook"), ( + "JupyterExecuteNotebookTool must implement 'execute_notebook' method." + ) + assert callable(tool_instance.execute_notebook), ( + "The 'execute_notebook' attribute should be callable." 
+ ) diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/LICENSE b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/README.md b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/README.md new file mode 100644 index 000000000..3518ae863 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/README.md @@ -0,0 +1,87 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterexecutenotebookwithparameters +

+ +--- + +# `swarmauri_tool_jupyterexecutenotebookwithparameters` + +## Overview + +This package provides a tool for executing Jupyter notebooks with custom parameters, leveraging the power of papermill. With the incorporated parameter injection, you can modify variables and data sources without manually editing your notebooks. + +## Installation + +This package is published on the Python Package Index (PyPI). You can install it with: + + pip install swarmauri_tool_jupyterexecutenotebookwithparameters + +### Dependencies + +• papermill +• swarmauri_core >= 0.6.0.dev1 +• swarmauri_base >= 0.6.0.dev1 + +These dependencies will be automatically installed when you install this package from PyPI. + +## Usage + +After installing, import the tool in your Python project: + + from swarmauri_tool_jupyterexecutenotebookwithparameters import JupyterExecuteNotebookWithParametersTool + +### Basic Example + +Create an instance of the tool and call it with the required arguments: + + # Example usage in a script or notebook + + # Instantiate the tool + tool_instance = JupyterExecuteNotebookWithParametersTool() + + # Execute a Jupyter notebook + result = tool_instance( + notebook_path="example_notebook.ipynb", + output_notebook_path="example_output.ipynb", + params={ + "input_data_path": "data/input.csv", + "run_mode": "production" + } + ) + + # Check for success or error + if "executed_notebook" in result: + print(f"Notebook executed successfully. Output saved at: {result['executed_notebook']}") + else: + print(f"Error executing notebook: {result['error']}") + +In this example: +• notebook_path points to the original .ipynb file. +• output_notebook_path is the output file that papermill writes after parameter injection and execution. +• params contains key-value pairs injected into the notebook as variables. 
+ +### Advanced Usage + +• Catching Exceptions: The tool automatically returns a dictionary containing "error" if any exceptions occur during execution, allowing for programmatic error handling in CI/CD pipelines or other automated processes. + +• Parameter Injection: You can pass in any number of parameters to the params dictionary. For instance, toggling debug flags or updating dataset paths dynamically is straightforward with this mechanism. + +• Integration: This tool is designed to be used standalone or within the Swarmauri framework. When integrated into a pipeline, different notebooks can share the same or overridden parameter sets for consistent processing across multiple steps. + +--- + +## Further Development + +This tool follows PEP 8 style guidelines, includes docstrings for all classes and methods, and utilizes type hints for better readability and maintainability. It’s designed to fit seamlessly into your Python projects, enabling powerful, parameterized notebook executions with minimal boilerplate. 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/payload.json b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/payload.json new file mode 100644 index 000000000..cebee0598 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to execute parameterized notebooks using papermill, allowing dynamic input and output capture for automated workflows.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterexecutenotebookwithparameters", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterExecuteNotebookWithParametersTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Execute notebooks with injected parameters using papermill.", + "Capture execution outputs and errors.", + "Log execution details for auditing.", + "Handle parameter validation and substitution.", + "Return the path to the executed notebook output.", + "Integrate with CI/CD pipelines for batch processing." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "papermill", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/pyproject.toml new file mode 100644 index 000000000..4426e283f --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/pyproject.toml @@ -0,0 +1,52 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterexecutenotebookwithparameters" +version = "1.0.1" +description = "A tool designed to execute parameterized notebooks using papermill, allowing dynamic input and output capture for automated workflows." +authors = ["Swarmauri Contributors "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/swarmauri/swarmauri-sdk" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" +papermill = "^2.6.0" +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: 
standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterexecutenotebookwithparameterstool = "swarmauri_tool_jupyterexecutenotebookwithparameters:JupyterExecuteNotebookWithParametersTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/swarmauri_tool_jupyterexecutenotebookwithparameters/JupyterExecuteNotebookWithParametersTool.py b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/swarmauri_tool_jupyterexecutenotebookwithparameters/JupyterExecuteNotebookWithParametersTool.py new file mode 100644 index 000000000..c1b7ebe2c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/swarmauri_tool_jupyterexecutenotebookwithparameters/JupyterExecuteNotebookWithParametersTool.py @@ -0,0 +1,122 @@ +from typing import List, Literal, Dict, Any, Optional +import logging +import papermill as pm +from pydantic import Field +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + +""" +JupyterExecuteNotebookWithParametersTool.py + +This module defines the JupyterExecuteNotebookWithParametersTool, a component that executes Jupyter +notebooks using papermill, injecting custom parameters, capturing execution logs, and returning +the path to the executed notebook output. It inherits from ToolBase and integrates seamlessly +with the Swarmauri framework. 
+""" + + +@ComponentBase.register_type(ToolBase, "JupyterExecuteNotebookWithParametersTool") +class JupyterExecuteNotebookWithParametersTool(ToolBase): + """ + JupyterExecuteNotebookWithParametersTool is a tool that executes Jupyter notebooks with custom + parameter injection using papermill. This tool captures execution logs, errors, and returns + the path to the resulting executed notebook. It can be utilized within CI/CD pipelines when + batch processing multiple notebooks. + + Attributes: + version (str): The version of the JupyterExecuteNotebookWithParametersTool. + parameters (List[Parameter]): A list of parameters the tool expects, including notebook_path, + output_notebook_path, and params. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterExecuteNotebookWithParametersTool"]): The type identifier. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_path", + type="string", + description="The path to the Jupyter Notebook file to execute.", + required=True, + ), + Parameter( + name="output_notebook_path", + type="string", + description="The path where the output notebook will be saved.", + required=True, + ), + Parameter( + name="params", + type="object", + description="A dictionary of parameters to inject into the notebook.", + required=False, + ), + ] + ) + name: str = "JupyterExecuteNotebookWithParametersTool" + description: str = "Executes Jupyter notebooks with papermill, injecting parameters and capturing outputs." 
+ type: Literal["JupyterExecuteNotebookWithParametersTool"] = ( + "JupyterExecuteNotebookWithParametersTool" + ) + + def __call__( + self, + notebook_path: str, + output_notebook_path: str, + params: Optional[Dict[str, Any]] = None, + ) -> Dict[str, str]: + """ + Executes the specified Jupyter notebook using papermill, injecting the provided parameters + and saving the executed notebook to the specified output path. + + Args: + notebook_path (str): The path to the Jupyter Notebook file to be executed. + output_notebook_path (str): The path where the executed notebook will be saved. + params (Optional[Dict[str, Any]]): A dictionary of parameters to inject into the notebook. + + Returns: + Dict[str, str]: A dictionary containing information about the execution result. If + successful, it includes the key 'executed_notebook' pointing to the + output notebook path. If an error occurs, the dictionary contains an + 'error' key with a descriptive message. + + Raises: + ValueError: If the notebook_path is not a .ipynb file. + """ + logger.info( + "Starting notebook execution with JupyterExecuteNotebookWithParametersTool." + ) + logger.debug( + "notebook_path: %s, output_notebook_path: %s, params: %s", + notebook_path, + output_notebook_path, + params, + ) + + if not notebook_path.endswith(".ipynb"): + error_message = "The specified notebook_path is not a .ipynb file." + logger.error(error_message) + return {"error": error_message} + + if not output_notebook_path.endswith(".ipynb"): + error_message = "The specified output_notebook_path is not a .ipynb file." 
+ logger.error(error_message) + return {"error": error_message} + + try: + pm.execute_notebook( + input_path=notebook_path, + output_path=output_notebook_path, + parameters=params or {}, + ) + logger.info("Notebook execution completed successfully.") + return {"executed_notebook": output_notebook_path} + except Exception as e: + error_message = f"An error occurred during notebook execution: {str(e)}" + logger.error(error_message) + return {"error": error_message} diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/swarmauri_tool_jupyterexecutenotebookwithparameters/__init__.py b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/swarmauri_tool_jupyterexecutenotebookwithparameters/__init__.py new file mode 100644 index 000000000..36c109daf --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/swarmauri_tool_jupyterexecutenotebookwithparameters/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterexecutenotebookwithparameters.JupyterExecuteNotebookWithParametersTool import ( + JupyterExecuteNotebookWithParametersTool, +) + + +__all__ = ["JupyterExecuteNotebookWithParametersTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexecutenotebookwithparameters") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/tests/unit/test_JupyterExecuteNotebookWithParametersTool.py b/pkgs/community/swarmauri_tool_jupyterexecutenotebookwithparameters/tests/unit/test_JupyterExecuteNotebookWithParametersTool.py new file mode 100644 index 000000000..0c084388f --- /dev/null +++ 
"""
test_JupyterExecuteNotebookWithParametersTool.py

Pytest suite for the JupyterExecuteNotebookWithParametersTool class. It
checks the tool's default metadata, the .ipynb extension validation for
both the input and output path arguments, and the success / failure
behaviour of the papermill execution step (mocked so no notebook runs).
"""

import pytest
from unittest.mock import patch

from swarmauri_tool_jupyterexecutenotebookwithparameters.JupyterExecuteNotebookWithParametersTool import (
    JupyterExecuteNotebookWithParametersTool,
)

# Dotted path of the papermill entry point as seen from the tool's module,
# so the patch intercepts exactly the call the tool makes.
_EXECUTE_TARGET = (
    "swarmauri_tool_jupyterexecutenotebookwithparameters."
    "JupyterExecuteNotebookWithParametersTool.pm.execute_notebook"
)


@pytest.fixture
def tool_instance() -> JupyterExecuteNotebookWithParametersTool:
    """Return a fresh tool instance for each test."""
    return JupyterExecuteNotebookWithParametersTool()


def test_class_attributes(
    tool_instance: JupyterExecuteNotebookWithParametersTool,
) -> None:
    """The tool exposes the expected default metadata."""
    assert tool_instance.version == "1.0.0"
    assert tool_instance.name == "JupyterExecuteNotebookWithParametersTool"
    assert tool_instance.type == "JupyterExecuteNotebookWithParametersTool"
    assert len(tool_instance.parameters) == 3


def test_call_incorrect_notebook_extension(
    tool_instance: JupyterExecuteNotebookWithParametersTool,
) -> None:
    """A non-.ipynb notebook_path produces an error result."""
    outcome = tool_instance(
        notebook_path="invalid_file.txt", output_notebook_path="out.ipynb"
    )
    assert "error" in outcome
    assert "not a .ipynb file" in outcome["error"]


def test_call_incorrect_output_extension(
    tool_instance: JupyterExecuteNotebookWithParametersTool,
) -> None:
    """A non-.ipynb output_notebook_path produces an error result."""
    outcome = tool_instance(
        notebook_path="notebook.ipynb", output_notebook_path="out.txt"
    )
    assert "error" in outcome
    assert "not a .ipynb file" in outcome["error"]


def test_call_execution_success(
    tool_instance: JupyterExecuteNotebookWithParametersTool,
) -> None:
    """Valid paths delegate to papermill and report the output notebook."""
    with patch(_EXECUTE_TARGET) as execute_mock:
        execute_mock.return_value = None  # simulate a clean papermill run
        outcome = tool_instance(
            notebook_path="notebook.ipynb",
            output_notebook_path="executed_notebook.ipynb",
            params={"key": "value"},
        )
    assert "executed_notebook" in outcome
    assert outcome["executed_notebook"] == "executed_notebook.ipynb"
    execute_mock.assert_called_once_with(
        input_path="notebook.ipynb",
        output_path="executed_notebook.ipynb",
        parameters={"key": "value"},
    )


def test_call_execution_failure(
    tool_instance: JupyterExecuteNotebookWithParametersTool,
) -> None:
    """An exception raised by papermill becomes an 'error' result."""
    with patch(_EXECUTE_TARGET) as execute_mock:
        execute_mock.side_effect = Exception("Simulated execution error")
        outcome = tool_instance(
            notebook_path="notebook.ipynb",
            output_notebook_path="executed_notebook.ipynb",
            params={"key": "value"},
        )
    assert "error" in outcome
    assert "Simulated execution error" in outcome["error"]
    execute_mock.assert_called_once()
+ """ + assert __version__ is not None, "The package should define a __version__ attribute" + assert isinstance(__version__, str), "The package's __version__ should be a string" diff --git a/pkgs/community/swarmauri_tool_jupyterexporthtml/LICENSE b/pkgs/community/swarmauri_tool_jupyterexporthtml/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexporthtml/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexporthtml/README.md b/pkgs/community/swarmauri_tool_jupyterexporthtml/README.md new file mode 100644 index 000000000..cbc86996b --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexporthtml/README.md @@ -0,0 +1,118 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterexporthtml +

+ +--- + +# swarmauri_tool_jupyterexporthtml + +The purpose of this package is to provide a flexible tool for converting Jupyter Notebook content into HTML format using nbconvert’s HTMLExporter. This enables easy presentation, sharing, or embedding of Jupyter Notebook content into various workflows or web applications. + +## Installation + +Since this package is published for Python 3.10 to 3.13, you will need a recent version of Python. To install: + +1. Ensure you have the required dependencies, including nbconvert, installed. +2. Run the following command in your preferred Python environment to install from PyPI: + + pip install swarmauri_tool_jupyterexporthtml + +3. You’re ready to begin using the JupyterExportHTMLTool in your projects. + +### Dependencies +• nbconvert (for converting Notebook content into HTML) +• swarmauri_core, swarmauri_base (for Swarmauri framework integration) + +These are automatically installed when you install this package. + +## Usage + +Below is a short guide showing how to import and use the tool in your own Python code. The JupyterExportHTMLTool accepts a JSON-formatted string representing the notebook data, plus optional template, CSS, and JavaScript parameters. + +Example usage: + +-------------------------------------------------------------------------------- + +from swarmauri_tool_jupyterexporthtml import JupyterExportHTMLTool + +# Example Notebook JSON (this would typically be read from a file or other source) +notebook_json_str = ''' +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Hello World\\n", + "This is a sample notebook cell." 
+ ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} +''' + +# Create an instance of the exporter +exporter = JupyterExportHTMLTool() + +# Call the exporter with optional parameters +result = exporter( + notebook_json=notebook_json_str, + template_file=None, # or specify a path to a custom template + extra_css="body { font-family: Arial, sans-serif; }", + extra_js="console.log('HTML Export Ready!');" +) + +# Check for success +if "exported_html" in result: + # Save or process the HTML output + html_content = result["exported_html"] + print("Notebook conversion to HTML was successful. Length of output:", len(html_content)) +else: + # Handle any error messages + print("Error during export:", result.get("error")) + +-------------------------------------------------------------------------------- + +### Method Summary + +The JupyterExportHTMLTool class implements a callable interface, expecting parameters such as: +• notebook_json (required): A string containing Jupyter Notebook data in valid JSON format. +• template_file (optional): Path to an nbconvert-compatible template. +• extra_css (optional): Inline styles you wish to embed into the resulting HTML. +• extra_js (optional): Inline JavaScript you wish to embed before the closing body tag. + +It returns a dictionary containing either “exported_html” with the final HTML string or “error” with a message describing any failure. + +## Advanced Usage + +Use a custom template to further influence how the notebook is rendered. 
For instance, with an nbconvert template: + +result = exporter( + notebook_json=notebook_json_str, + template_file="custom_template.tpl", + extra_css="header { color: blue; }", + extra_js="alert('Notebook Conversion Complete!');" +) + +The extra_css parameter is injected inside a \n" + body = body.replace("", css_tag, 1) + + # If extra JS is provided, embed it before the closing tag + if extra_js: + js_tag = f"\n" + body = body.replace("", js_tag, 1) + + logging.info("Notebook successfully exported to HTML.") + return {"exported_html": body} + + except Exception as e: + logging.error("Error exporting notebook to HTML: %s", e) + return {"error": f"An error occurred: {str(e)}"} diff --git a/pkgs/community/swarmauri_tool_jupyterexporthtml/swarmauri_tool_jupyterexporthtml/__init__.py b/pkgs/community/swarmauri_tool_jupyterexporthtml/swarmauri_tool_jupyterexporthtml/__init__.py new file mode 100644 index 000000000..d6e41fc7c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexporthtml/swarmauri_tool_jupyterexporthtml/__init__.py @@ -0,0 +1,17 @@ +from swarmauri_tool_jupyterexporthtml.JupyterExportHTMLTool import JupyterExportHTMLTool + + +__all__ = ["JupyterExportHTMLTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexporthtml") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexporthtml/tests/unit/test_JupyterExportHTMLTool.py b/pkgs/community/swarmauri_tool_jupyterexporthtml/tests/unit/test_JupyterExportHTMLTool.py new file mode 100644 index 000000000..49be7a3ec --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexporthtml/tests/unit/test_JupyterExportHTMLTool.py @@ -0,0 +1,136 @@ +""" 
+test_JupyterExportHTMLTool.py + +This module contains pytest-based test cases for the JupyterExportHTMLTool class, ensuring that +the tool correctly converts Jupyter Notebooks into HTML and properly handles optional parameters +such as template files, extra CSS, and extra JS. +""" + +import pytest +from typing import Dict, Any + +from swarmauri_tool_jupyterexporthtml.JupyterExportHTMLTool import JupyterExportHTMLTool + + +@pytest.fixture +def valid_notebook_json() -> str: + """ + Provides a minimal valid JSON representation of a Jupyter Notebook for testing. + """ + return ( + '{"cells": [{"cell_type": "markdown", "metadata": {}, "source": ["# Test Notebook"]}],' + '"metadata": {}, "nbformat": 4, "nbformat_minor": 5}' + ) + + +@pytest.fixture +def invalid_notebook_json() -> str: + """ + Provides an intentionally invalid JSON string to test error handling. + """ + return '{"cells": "this is invalid notebook data"' + + +def test_jupyter_export_html_tool_instantiation() -> None: + """ + Tests that the JupyterExportHTMLTool can be instantiated without errors. + """ + tool = JupyterExportHTMLTool() + assert tool.name == "JupyterExportHTMLTool" + assert tool.type == "JupyterExportHTMLTool" + assert tool.version == "1.0.0" + + +def test_jupyter_export_html_tool_parameters() -> None: + """ + Tests that the tool's parameters are defined correctly. + """ + tool = JupyterExportHTMLTool() + param_names = [p.name for p in tool.parameters] + assert "notebook_json" in param_names + assert "template_file" in param_names + assert "extra_css" in param_names + assert "extra_js" in param_names + + +def test_jupyter_export_html_tool_call_basic(valid_notebook_json: str) -> None: + """ + Tests calling the JupyterExportHTMLTool with a minimal valid notebook JSON, + ensuring that it returns a dictionary containing the exported HTML. + """ + tool = JupyterExportHTMLTool() + result = tool(notebook_json=valid_notebook_json) + assert isinstance(result, dict), "Result should be a dictionary." 
+ assert "exported_html" in result, "The result should contain 'exported_html'." + assert " tag." + ) + + +def test_jupyter_export_html_tool_call_with_template(valid_notebook_json: str) -> None: + """ + Tests that providing a template file path is accepted. Since we cannot + actually load a custom template in a test environment by default, we only + check that the method runs without raising an exception. + """ + tool = JupyterExportHTMLTool() + result = tool(notebook_json=valid_notebook_json, template_file="dummy_template.tpl") + assert isinstance(result, dict), "Result should be a dictionary." + # Not testing actual template application here, as we cannot load a real template in this test. + + +def test_jupyter_export_html_tool_call_with_extra_css(valid_notebook_json: str) -> None: + """ + Tests that an inline CSS string is correctly embedded in the exported HTML. + """ + tool = JupyterExportHTMLTool() + css_content = "body { background-color: #EEE; }" + result = tool(notebook_json=valid_notebook_json, extra_css=css_content) + assert "exported_html" in result, "The result should contain 'exported_html'." + assert css_content in result["exported_html"], ( + "Extra CSS should be present in the exported HTML." + ) + + +def test_jupyter_export_html_tool_call_with_extra_js(valid_notebook_json: str) -> None: + """ + Tests that an inline JavaScript string is correctly embedded in the exported HTML. + """ + tool = JupyterExportHTMLTool() + js_content = "console.log('Test JS');" + result = tool(notebook_json=valid_notebook_json, extra_js=js_content) + assert "exported_html" in result, "The result should contain 'exported_html'." + assert js_content in result["exported_html"], ( + "Extra JS should be present in the exported HTML." + ) + + +def test_jupyter_export_html_tool_failure_with_invalid_json( + invalid_notebook_json: str, +) -> None: + """ + Tests that providing invalid JSON triggers an error during export. 
+ """ + tool = JupyterExportHTMLTool() + result = tool(notebook_json=invalid_notebook_json) + assert "error" in result, ( + "The result should contain an 'error' key if notebook JSON is invalid." + ) + assert "An error occurred" in result["error"], ( + "The error message should indicate that an error occurred." + ) + + +def test_jupyter_export_html_tool_required_parameter() -> None: + """ + Ensures that the notebook_json parameter is indeed required and an error-like + result is returned if it is missing or None. The tool uses Pydantic modeling, + which may raise exceptions if required parameters are missing. + """ + tool = JupyterExportHTMLTool() + try: + result: Dict[str, Any] = tool(notebook_json=None) # type: ignore + assert "error" in result, "Expecting an error due to missing notebook_json." + except Exception as exc: + # If Pydantic raises an exception, this is also acceptable behavior + assert "field required" in str(exc), "Missing required field error expected." diff --git a/pkgs/community/swarmauri_tool_jupyterexporthtml/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterexporthtml/tests/unit/test___init__.py new file mode 100644 index 000000000..8e9096c8a --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexporthtml/tests/unit/test___init__.py @@ -0,0 +1,37 @@ +""" +Module containing pytest tests for the package initialization. + +This module tests that the swarmauri_tool_jupyterexporthtml package +exposes the necessary components through its __init__.py. +""" + +from swarmauri_tool_jupyterexporthtml import JupyterExportHTMLTool, __version__ + + +class TestPackageInit: + """ + A test suite for ensuring the package initialization works as expected. + """ + + def test_jupyter_export_html_tool_import(self) -> None: + """ + Test that the JupyterExportHTMLTool can be imported from the package. 
+ + This test ensures that we're able to import the JupyterExportHTMLTool + class from the main package __init__.py, verifying that it is correctly + exposed by the package initialization. + """ + assert JupyterExportHTMLTool is not None, ( + "JupyterExportHTMLTool should be defined and imported from the " + "swarmauri_tool_jupyterexporthtml package." + ) + + def test_package_version_import(self) -> None: + """ + Test that the __version__ attribute is available in the package. + + This test verifies that the __version__ attribute is correctly + imported and is a non-empty string. + """ + assert isinstance(__version__, str), "Package __version__ should be a string." + assert __version__, "Package __version__ should not be empty." diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/LICENSE b/pkgs/community/swarmauri_tool_jupyterexportlatex/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportlatex/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/README.md b/pkgs/community/swarmauri_tool_jupyterexportlatex/README.md new file mode 100644 index 000000000..fefc0032d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportlatex/README.md @@ -0,0 +1,323 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterexportlatex +

+ +--- + +# swarmauri_tool_jupyterexportlatex + +A tool that exports a Jupyter Notebook to LaTeX format using nbconvert’s LatexExporter. This tool can optionally convert the produced LaTeX into PDF form, making it easy to prepare high-quality, publication-ready documents. + +## Installation + +This package requires Python 3.10 or above. + +• To install from PyPI using pip: + + pip install swarmauri_tool_jupyterexportlatex + +• Ensure that nbconvert is installed (it will be automatically installed with this package if you are using pip). + +• This package also relies on the swarmauri_core and swarmauri_base packages, which are automatically installed when you install swarmauri_tool_jupyterexportlatex from PyPI. + +Once installed, you will have access to the JupyterExportLatexTool class, which provides a straightforward way to convert your NotebookNode objects into LaTeX or PDF. + +## Usage + +1. Import the tool into your Python script or Jupyter notebook: + + from swarmauri_tool_jupyterexportlatex import JupyterExportLatexTool + +2. Construct an instance of the tool: + + tool = JupyterExportLatexTool() + +3. Provide a valid nbformat.NotebookNode object alongside optional parameters: + + output = tool( + notebook_node, + use_custom_template=False, + template_path=None, + to_pdf=True + ) + + Here: + • notebook_node: a valid nbformat.NotebookNode object representing your Jupyter notebook. + • use_custom_template (bool): set to True if you have a custom LaTeX template file. + • template_path (str): an optional custom LaTeX template path if use_custom_template is True. + • to_pdf (bool): set to True if you want to generate a PDF in addition to the LaTeX output. + +4. Handling the return structure: + + The method returns a dictionary which may contain: + • "latex_content": The LaTeX output string. + • "pdf_file_path": A temporary path to the generated PDF if to_pdf=True. + • "error": An error message if any exception occurred during export. 
+ +### Example + +Suppose you already have a loaded NotebookNode object named my_notebook: + +from nbformat import read +import io + +# Pretend we have notebook data in 'notebook_str' +notebook_str = """{ "cells": [], "metadata": {}, "nbformat": 4, "nbformat_minor": 5 }""" +my_notebook = read(io.StringIO(notebook_str), as_version=4) + +tool = JupyterExportLatexTool() +export_result = tool(my_notebook, to_pdf=True) + +if "error" in export_result: + print("Error:", export_result["error"]) +else: + latex_content = export_result.get("latex_content", "") + print("LaTeX content:\n", latex_content) + pdf_path = export_result.get("pdf_file_path") + if pdf_path: + print("PDF was successfully generated at:", pdf_path) + +--- + +## Dependencies + +Below are the main files that comprise this package: + +### pkgs/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/JupyterExportLatexTool.py +``` +""" +JupyterExportLatexTool.py + +This module defines the JupyterExportLatexTool, a component that converts Jupyter notebooks +(NotebookNode objects) into LaTeX format. It supports custom LaTeX templates, logs the export +process, handles conversion errors, and can optionally produce a PDF. This tool is designed +to meet academic publication standards. +""" + +from typing import List, Literal, Dict, Any, Optional +from pydantic import Field +from nbformat import NotebookNode +from nbconvert import LatexExporter, PDFExporter +from nbconvert.writers import FilesWriter +import os +import tempfile + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + + +@ComponentBase.register_type(ToolBase, 'JupyterExportLatexTool') +class JupyterExportLatexTool(ToolBase): + """ + JupyterExportLatexTool is responsible for converting a Jupyter Notebook (NotebookNode) + into a LaTeX document. 
It supports using a custom LaTeX template, can log and handle + conversion errors, and optionally convert the generated LaTeX to PDF. + + Attributes: + version (str): The version of the JupyterExportLatexTool. + parameters (List[Parameter]): A list of parameters required to perform the export. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterExportLatexTool"]): The type identifier for the tool. + """ + version: str = "0.1.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_node", + type="object", + description="A nbformat.NotebookNode object representing a Jupyter notebook.", + required=True + ), + Parameter( + name="use_custom_template", + type="boolean", + description="Whether or not to use a custom LaTeX template.", + required=False + ), + Parameter( + name="template_path", + type="string", + description="Path to a custom LaTeX template if use_custom_template is True.", + required=False + ), + Parameter( + name="to_pdf", + type="boolean", + description="If True, also convert the LaTeX output to PDF.", + required=False + ), + ] + ) + name: str = "JupyterExportLatexTool" + description: str = "Converts Jupyter notebooks to LaTeX and optionally to PDF for academic publication." + type: Literal["JupyterExportLatexTool"] = "JupyterExportLatexTool" + + def __call__( + self, + notebook_node: NotebookNode, + use_custom_template: bool = False, + template_path: Optional[str] = None, + to_pdf: bool = False + ) -> Dict[str, Any]: + """ + Converts a Jupyter notebook (NotebookNode) into LaTeX format, optionally using a + custom template, and returns the resulting LaTeX content. This method can also + generate a PDF version if requested. + + Args: + notebook_node (NotebookNode): The Jupyter NotebookNode to convert. + use_custom_template (bool, optional): Whether to apply a custom LaTeX template. 
+ template_path (str, optional): Custom template path if use_custom_template is True. + to_pdf (bool, optional): If True, the method will also convert the LaTeX to a PDF file. + + Returns: + Dict[str, Any]: A dictionary containing: + - "latex_content": The generated LaTeX content as a string. + - "pdf_file_path": The file path to the generated PDF if to_pdf is True. + - "error": An error message if any exception occurred. + + Example: + >>> tool = JupyterExportLatexTool() + >>> latex_output = tool(notebook_node, False, None, False) + >>> print(latex_output["latex_content"]) + """ + try: + # Create the LaTeX exporter + latex_exporter = LatexExporter() + if use_custom_template and template_path: + latex_exporter.template_file = template_path + + # Convert the notebook to LaTeX + body, _ = latex_exporter.from_notebook_node(notebook_node) + + result: Dict[str, Any] = {"latex_content": body} + + # If user requested PDF export, attempt to convert the LaTeX to PDF + if to_pdf: + pdf_exporter = PDFExporter() + if use_custom_template and template_path: + pdf_exporter.template_file = template_path + + # Use a temporary directory for PDF conversion + with tempfile.TemporaryDirectory() as temp_dir: + pdf_exporter.output_filename = "converted_notebook.pdf" + pdf_data, _ = pdf_exporter.from_notebook_node(notebook_node) + + # Write the PDF file to disk + writer = FilesWriter(build_directory=temp_dir) + writer.write(pdf_data, pdf_exporter.output_filename) + + pdf_path = os.path.join(temp_dir, pdf_exporter.output_filename) + + result["pdf_file_path"] = pdf_path + + return result + except Exception as e: + return {"error": f"An error occurred during LaTeX export: {str(e)}"} + +``` + +### pkgs/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/__init__.py +``` +from swarmauri_tool_jupyterexportlatex.JupyterExportLatexTool import JupyterExportLatexTool + + +__all__ = [ "JupyterExportLatexTool" ] + +try: + # For Python 3.8 and newer + from importlib.metadata import 
version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexportlatex") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" + +``` + +### pkgs/swarmauri_tool_jupyterexportlatex/pyproject.toml +``` +[tool.poetry] +name = "swarmauri_tool_jupyterexportlatex" +version = "0.6.1.dev7" +description = "A tool that exports a Jupyter Notebook to LaTeX format using nbconvert’s LatexExporter, enabling further conversion to PDF." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/swarmauri/swarmauri-sdk/tree/mono/dev/pkgs/swarmauri_tool_jupyterexportlatex/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri repositories +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbconvert = "*" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + 
"i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterexportlatextool = "swarmauri_tool_jupyterexportlatex:JupyterExportLatexTool" + +``` + +## License + +Apache 2.0 + +--- + +Feel free to explore the provided examples and code to see how you can tailor the export of your Jupyter notebooks for academic publications, internal documentation, or any other use case that requires a clean LaTeX output. \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/payload.json b/pkgs/community/swarmauri_tool_jupyterexportlatex/payload.json new file mode 100644 index 000000000..14c8aea2a --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportlatex/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that exports a Jupyter Notebook to LaTeX format using nbconvert\u2019s LatexExporter, enabling further conversion to PDF.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterexportlatex", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterExportLatexTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Convert a NotebookNode into LaTeX format.", + "Support custom LaTeX templates.", + "Log export operations and handle conversion errors.", + "Return the LaTeX content for further processing.", + "Facilitate conversion to PDF with external tools.", + "Validate formatting for academic publication standards." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbconvert", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterexportlatex/pyproject.toml new file mode 100644 index 000000000..b17c9bba9 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportlatex/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterexportlatex" +version = "0.6.1" +description = "A tool that exports a Jupyter Notebook to LaTeX format using nbconvert’s LatexExporter, enabling further conversion to PDF." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/swarmauri/swarmauri-sdk/tree/mono/dev/pkgs/swarmauri_tool_jupyterexportlatex/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri repositories +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbconvert = "^7.16.6" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + 
"test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterexportlatextool = "swarmauri_tool_jupyterexportlatex:JupyterExportLatexTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/JupyterExportLatexTool.py b/pkgs/community/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/JupyterExportLatexTool.py new file mode 100644 index 000000000..33074c53a --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/JupyterExportLatexTool.py @@ -0,0 +1,132 @@ +""" +JupyterExportLatexTool.py + +This module defines the JupyterExportLatexTool, a component that converts Jupyter notebooks +(NotebookNode objects) into LaTeX format. It supports custom LaTeX templates, logs the export +process, handles conversion errors, and can optionally produce a PDF. This tool is designed +to meet academic publication standards. +""" + +from typing import List, Literal, Dict, Any, Optional +from pydantic import Field +from nbformat import NotebookNode +from nbconvert import LatexExporter, PDFExporter +from nbconvert.writers import FilesWriter +import os +import tempfile + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + + +@ComponentBase.register_type(ToolBase, "JupyterExportLatexTool") +class JupyterExportLatexTool(ToolBase): + """ + JupyterExportLatexTool is responsible for converting a Jupyter Notebook (NotebookNode) + into a LaTeX document. 
It supports using a custom LaTeX template, can log and handle + conversion errors, and optionally convert the generated LaTeX to PDF. + + Attributes: + version (str): The version of the JupyterExportLatexTool. + parameters (List[Parameter]): A list of parameters required to perform the export. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterExportLatexTool"]): The type identifier for the tool. + """ + + version: str = "0.1.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_node", + type="object", + description="A nbformat.NotebookNode object representing a Jupyter notebook.", + required=True, + ), + Parameter( + name="use_custom_template", + type="boolean", + description="Whether or not to use a custom LaTeX template.", + required=False, + ), + Parameter( + name="template_path", + type="string", + description="Path to a custom LaTeX template if use_custom_template is True.", + required=False, + ), + Parameter( + name="to_pdf", + type="boolean", + description="If True, also convert the LaTeX output to PDF.", + required=False, + ), + ] + ) + name: str = "JupyterExportLatexTool" + description: str = "Converts Jupyter notebooks to LaTeX and optionally to PDF for academic publication." + type: Literal["JupyterExportLatexTool"] = "JupyterExportLatexTool" + + def __call__( + self, + notebook_node: NotebookNode, + use_custom_template: bool = False, + template_path: Optional[str] = None, + to_pdf: bool = False, + ) -> Dict[str, Any]: + """ + Converts a Jupyter notebook (NotebookNode) into LaTeX format, optionally using a + custom template, and returns the resulting LaTeX content. This method can also + generate a PDF version if requested. + + Args: + notebook_node (NotebookNode): The Jupyter NotebookNode to convert. + use_custom_template (bool, optional): Whether to apply a custom LaTeX template. 
+ template_path (str, optional): Custom template path if use_custom_template is True. + to_pdf (bool, optional): If True, the method will also convert the LaTeX to a PDF file. + + Returns: + Dict[str, Any]: A dictionary containing: + - "latex_content": The generated LaTeX content as a string. + - "pdf_file_path": The file path to the generated PDF if to_pdf is True. + - "error": An error message if any exception occurred. + + Example: + >>> tool = JupyterExportLatexTool() + >>> latex_output = tool(notebook_node, False, None, False) + >>> print(latex_output["latex_content"]) + """ + try: + # Create the LaTeX exporter + latex_exporter = LatexExporter() + if use_custom_template and template_path: + latex_exporter.template_file = template_path + + # Convert the notebook to LaTeX + body, _ = latex_exporter.from_notebook_node(notebook_node) + + result: Dict[str, Any] = {"latex_content": body} + + # If user requested PDF export, attempt to convert the LaTeX to PDF + if to_pdf: + pdf_exporter = PDFExporter() + if use_custom_template and template_path: + pdf_exporter.template_file = template_path + + # Use a temporary directory for PDF conversion + with tempfile.TemporaryDirectory() as temp_dir: + pdf_exporter.output_filename = "converted_notebook.pdf" + pdf_data, _ = pdf_exporter.from_notebook_node(notebook_node) + + # Write the PDF file to disk + writer = FilesWriter(build_directory=temp_dir) + writer.write(pdf_data, pdf_exporter.output_filename) + + pdf_path = os.path.join(temp_dir, pdf_exporter.output_filename) + + result["pdf_file_path"] = pdf_path + + return result + except Exception as e: + return {"error": f"An error occurred during LaTeX export: {str(e)}"} diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/__init__.py b/pkgs/community/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/__init__.py new file mode 100644 index 000000000..0e8aa0458 --- /dev/null +++ 
b/pkgs/community/swarmauri_tool_jupyterexportlatex/swarmauri_tool_jupyterexportlatex/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterexportlatex.JupyterExportLatexTool import ( + JupyterExportLatexTool, +) + + +__all__ = ["JupyterExportLatexTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexportlatex") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/tests/unit/test_JupyterExportLatexTool.py b/pkgs/community/swarmauri_tool_jupyterexportlatex/tests/unit/test_JupyterExportLatexTool.py new file mode 100644 index 000000000..e289803fe --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportlatex/tests/unit/test_JupyterExportLatexTool.py @@ -0,0 +1,134 @@ +""" +test_JupyterExportLatexTool.py + +This module contains pytest-based test cases for verifying the functionality of the +JupyterExportLatexTool class. It checks the correctness of the LaTeX conversion process +and optional PDF generation, as well as error handling for problematic inputs. +""" + +import os +import pytest +from typing import Dict, Any +from nbformat.v4.nbbase import new_notebook +from swarmauri_tool_jupyterexportlatex.JupyterExportLatexTool import ( + JupyterExportLatexTool, +) +from swarmauri_base.tools.ToolBase import ToolBase + + +@pytest.fixture +def sample_notebook_node() -> Any: + """ + Provides a sample NotebookNode object for testing. + + Returns: + A simple NotebookNode containing one empty cell. 
+ """ + nb = new_notebook() + nb.cells.append({"cell_type": "code", "source": [], "metadata": {}, "outputs": []}) + return nb + + +def test_jupyter_export_latex_tool_inheritance() -> None: + """ + Ensures that JupyterExportLatexTool inherits from the ToolBase class. + """ + assert issubclass(JupyterExportLatexTool, ToolBase), ( + "JupyterExportLatexTool does not inherit from ToolBase as expected." + ) + + +def test_jupyter_export_latex_tool_init() -> None: + """ + Tests default initialization of the JupyterExportLatexTool class. + Verifies that the default attributes match expected values. + """ + tool = JupyterExportLatexTool() + assert tool.version == "0.1.0", "Default version should be '0.1.0'." + assert tool.name == "JupyterExportLatexTool", ( + "Tool name should match its class name." + ) + assert tool.type == "JupyterExportLatexTool", ( + "Tool type should be 'JupyterExportLatexTool'." + ) + + +def test_conversion_no_custom_template_no_pdf(sample_notebook_node: Any) -> None: + """ + Tests the LaTeX conversion process without a custom template and without PDF generation. + Ensures that the returned dictionary contains LaTeX content and no errors. + """ + tool = JupyterExportLatexTool() + result: Dict[str, Any] = tool( + notebook_node=sample_notebook_node, use_custom_template=False, to_pdf=False + ) + assert "error" not in result, ( + f"Error returned unexpectedly: {result.get('error', '')}" + ) + assert "latex_content" in result, "Expected 'latex_content' key in the result." + assert result["latex_content"], "LaTeX content should not be empty." + + +def test_conversion_no_custom_template_with_pdf(sample_notebook_node: Any) -> None: + """ + Tests the LaTeX conversion process without a custom template and with PDF generation. + Ensures that the returned dictionary includes a PDF path. 
+ """ + tool = JupyterExportLatexTool() + result: Dict[str, Any] = tool( + notebook_node=sample_notebook_node, use_custom_template=False, to_pdf=True + ) + assert "error" not in result, ( + f"Error returned unexpectedly: {result.get('error', '')}" + ) + assert "latex_content" in result, "Missing 'latex_content' in the result." + assert "pdf_file_path" in result, "Missing 'pdf_file_path' in the result." + assert os.path.isfile(result["pdf_file_path"]), ( + "The PDF file path does not point to a valid file." + ) + + +def test_conversion_with_custom_template(sample_notebook_node: Any, tmp_path) -> None: + """ + Tests the LaTeX conversion process with a custom template. Verifies that the tool + accepts a template path and processes the notebook without raising an error. + + Args: + sample_notebook_node (Any): A fixture providing a minimal NotebookNode. + tmp_path: A pytest fixture providing a temporary directory. + """ + # Create a dummy template file + custom_template = tmp_path / "custom.tplx" + custom_template.write_text("Some custom LaTeX template content") + + tool = JupyterExportLatexTool() + result: Dict[str, Any] = tool( + notebook_node=sample_notebook_node, + use_custom_template=True, + template_path=str(custom_template), + to_pdf=False, + ) + assert "error" not in result, ( + f"Error returned unexpectedly with custom template: {result.get('error', '')}" + ) + assert "latex_content" in result, ( + "Expected 'latex_content' key in the result (custom template)." + ) + + +def test_conversion_error_handling() -> None: + """ + Tests that the tool handles conversion errors gracefully and returns an error message + when given invalid input instead of a NotebookNode. + """ + tool = JupyterExportLatexTool() + result: Dict[str, Any] = tool( + notebook_node=None, # Invalid + use_custom_template=False, + template_path=None, + to_pdf=False, + ) + assert "error" in result, "Expected an error message for invalid notebook input." 
+ assert "latex_content" not in result, ( + "There should be no 'latex_content' for invalid input." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterexportlatex/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterexportlatex/tests/unit/test___init__.py new file mode 100644 index 000000000..bd97aa429 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportlatex/tests/unit/test___init__.py @@ -0,0 +1,29 @@ +""" +Unit tests for verifying the package initialization of swarmauri_tool_jupyterexportlatex. + +This module ensures the __init__.py file correctly exposes the JupyterExportLatexTool class +and the __version__ attribute, validating the package setup and exports. +""" + +from typing import Any +from swarmauri_tool_jupyterexportlatex import JupyterExportLatexTool, __version__ + + +def test_jupyter_export_latex_tool_imported_properly() -> None: + """ + Test that JupyterExportLatexTool can be imported and instantiated without errors. + + Verifies that the class is exposed properly through the package's __init__.py. + """ + tool: Any = JupyterExportLatexTool() + assert tool is not None, "JupyterExportLatexTool was not properly instantiated." + + +def test_package_version_is_exposed() -> None: + """ + Test that the __version__ attribute is exposed and is a non-empty string. + + Validates the package's versioning information as part of initialization. + """ + assert isinstance(__version__, str), "__version__ should be a string." + assert __version__, "__version__ should not be empty." diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/LICENSE b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/README.md b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/README.md new file mode 100644 index 000000000..153884e59 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/README.md @@ -0,0 +1,30 @@ + + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterexportmarkdown +

+ +--- + +# swarmauri_tool_jupyterexportmarkdown diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/payload.json b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/payload.json new file mode 100644 index 000000000..764eb7d0d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that exports a Jupyter Notebook to Markdown format using nbconvert\u2019s MarkdownExporter, suitable for documentation and static site generation.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterexportmarkdown", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterExportMarkdownTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Convert a NotebookNode into Markdown format.", + "Support custom Markdown templates and styling.", + "Log export operations and potential errors.", + "Ensure proper formatting of code and outputs.", + "Return the generated Markdown content.", + "Facilitate integration with static site generators." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbconvert", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/pyproject.toml new file mode 100644 index 000000000..bf870db63 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/pyproject.toml @@ -0,0 +1,52 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterexportmarkdown" +version = "1.0.1" +description = "A Swarmauri tool designed to export Jupyter Notebooks to Markdown." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/swarmauri/swarmauri-sdk" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" +papermill = "^2.6.0" +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +swarmauri_tool_jupyterexportmarkdown = "swarmauri_tool_jupyterexportmarkdown:JupyterExportMarkdownTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/swarmauri_tool_jupyterexportmarkdown/JupyterExportMarkdownTool.py b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/swarmauri_tool_jupyterexportmarkdown/JupyterExportMarkdownTool.py new file mode 100644 index 
000000000..e5ee87c88 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/swarmauri_tool_jupyterexportmarkdown/JupyterExportMarkdownTool.py @@ -0,0 +1,151 @@ +""" +JupyterExportMarkdownTool.py + +This module defines the JupyterExportMarkdownTool, a component that converts a Jupyter +Notebook into Markdown format. It demonstrates how to inherit from the ToolBase class +and integrate with the swarmauri framework. The tool supports custom templates, styling, +and logs export operations to help track usage and potential errors. +""" + +import logging +from typing import List, Literal, Dict, Optional, Any +from pydantic import Field + +import nbformat +from nbconvert import MarkdownExporter + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterExportMarkdownTool") +class JupyterExportMarkdownTool(ToolBase): + """ + JupyterExportMarkdownTool converts a Jupyter Notebook (represented as a NotebookNode or JSON-like + structure) into Markdown format. It supports a custom template for formatting and allows optional + styling resources. This tool is designed for effortless integration with static site generators. + + Attributes: + version (str): The version of the JupyterExportMarkdownTool. + parameters (List[Parameter]): A list of parameters for notebook export. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterExportMarkdownTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_json", + type="object", + description=( + "A JSON-like dictionary representing the Jupyter Notebook to export. " + "It should conform to the NotebookNode structure." 
+ ), + required=True, + ), + Parameter( + name="template", + type="string", + description=( + "An optional nbconvert-compatible template name or path to " + "customize the Markdown output." + ), + required=False, + ), + Parameter( + name="styles", + type="string", + description=( + "Optional custom CSS style definitions as a string. " + "These styles will be embedded into the exported Markdown." + ), + required=False, + ), + ] + ) + name: str = "JupyterExportMarkdownTool" + description: str = "Converts a Jupyter Notebook into Markdown format." + type: Literal["JupyterExportMarkdownTool"] = "JupyterExportMarkdownTool" + + def __call__( + self, + notebook_json: Dict[str, Any], + template: Optional[str] = None, + styles: Optional[str] = None, + ) -> Dict[str, str]: + """ + Converts the provided Jupyter Notebook JSON into Markdown format using nbconvert. + + Args: + notebook_json (Dict[str, Any]): A dictionary representing the Jupyter notebook + structure (NotebookNode). Must follow nbformat specifications. + template (Optional[str]): An optional template name or path for customizing the + Markdown output. + styles (Optional[str]): A string of custom CSS rules to embed in the exported + Markdown. Useful for styling code blocks, headings, etc. + + Returns: + Dict[str, str]: A dictionary containing either the exported Markdown content or + an error message if the conversion fails. + + Example: + >>> tool = JupyterExportMarkdownTool() + >>> notebook_dict = { + ... "cells": [ + ... { + ... "cell_type": "markdown", + ... "metadata": {}, + ... "source": ["# Sample Notebook\\n", "Some introductory text."] + ... } + ... ], + ... "metadata": {}, + ... "nbformat": 4, + ... "nbformat_minor": 5 + ... } + >>> result = tool(notebook_dict) + >>> print(result["exported_markdown"]) + # Sample Notebook + Some introductory text. 
+ """ + logger.info("Starting export of notebook to Markdown.") + + try: + # Convert the incoming JSON to a NotebookNode + nb_node = nbformat.from_dict(notebook_json) + logger.info("Notebook JSON successfully parsed into a NotebookNode.") + + # Create an nbconvert MarkdownExporter + exporter = MarkdownExporter() + + # If a template is provided, apply it + if template: + exporter.template_file = template + logger.info(f"Using custom template: {template}") + + # Prepare resources (e.g., for styling) + resources = {} + if styles: + # The CSS styles are stored in a list (nbconvert expects CSS as a list of strings) + resources["inlining"] = {"css": [styles]} + logger.info("Custom CSS styles have been applied.") + + # Perform the conversion + markdown_content, _ = exporter.from_notebook_node( + nb_node, resources=resources + ) + logger.info("Notebook successfully exported to Markdown.") + + return { + "tool": "JupyterExportMarkdownTool", + "exported_markdown": markdown_content, + } + + except Exception as e: + error_message = f"Failed to export notebook to Markdown: {str(e)}" + logger.error(error_message, exc_info=True) + return {"error": error_message} diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/swarmauri_tool_jupyterexportmarkdown/__init__.py b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/swarmauri_tool_jupyterexportmarkdown/__init__.py new file mode 100644 index 000000000..2c7543c9a --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/swarmauri_tool_jupyterexportmarkdown/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterexportmarkdown.JupyterExportMarkdownTool import ( + JupyterExportMarkdownTool, +) + + +__all__ = ["JupyterExportMarkdownTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = 
version("swarmauri_tool_jupyterexportmarkdown") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/tests/unit/test_JupyterExportMarkdownTool.py b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/tests/unit/test_JupyterExportMarkdownTool.py new file mode 100644 index 000000000..8a2bf27a2 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/tests/unit/test_JupyterExportMarkdownTool.py @@ -0,0 +1,129 @@ +""" +test_JupyterExportMarkdownTool.py + +This module contains pytest-based test cases for the JupyterExportMarkdownTool class, +verifying its functionality and correctness when converting Jupyter Notebooks to +Markdown format. +""" + +import pytest +from typing import Dict, Any, Optional + +from swarmauri_tool_jupyterexportmarkdown.JupyterExportMarkdownTool import ( + JupyterExportMarkdownTool, +) + + +@pytest.fixture +def sample_notebook_json() -> Dict[str, Any]: + """ + A sample fixture that provides a basic Jupyter Notebook JSON structure + for testing the export functionality. + """ + return { + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": ["# Sample Notebook\\n", "Some introductory text."], + }, + { + "cell_type": "code", + "metadata": {}, + "execution_count": None, + "outputs": [], + "source": ["print('Hello, World!')"], + }, + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5, + } + + +@pytest.fixture +def tool_instance() -> JupyterExportMarkdownTool: + """ + Creates an instance of the JupyterExportMarkdownTool for testing. + """ + return JupyterExportMarkdownTool() + + +def test_tool_metadata(tool_instance: JupyterExportMarkdownTool) -> None: + """ + Tests the metadata of the JupyterExportMarkdownTool for correctness. 
+ """ + assert tool_instance.name == "JupyterExportMarkdownTool" + assert ( + tool_instance.description == "Converts a Jupyter Notebook into Markdown format." + ) + assert tool_instance.version == "1.0.0" + assert tool_instance.type == "JupyterExportMarkdownTool" + + +def test_export_basic_notebook( + tool_instance: JupyterExportMarkdownTool, sample_notebook_json: Dict[str, Any] +) -> None: + """ + Tests exporting a basic sample notebook JSON to ensure Markdown is returned without errors. + """ + result = tool_instance(notebook_json=sample_notebook_json) + assert "exported_markdown" in result, ( + "Expected 'exported_markdown' in the return dictionary." + ) + assert "Notebook" in result["exported_markdown"], ( + "Expected the heading from the sample notebook in the output." + ) + + +def test_export_with_styles( + tool_instance: JupyterExportMarkdownTool, sample_notebook_json: Dict[str, Any] +) -> None: + """ + Tests exporting the notebook with custom CSS styles to verify that they are applied. + """ + custom_css = "h1 { color: red; }" + result = tool_instance(notebook_json=sample_notebook_json, styles=custom_css) + assert "exported_markdown" in result, ( + "Expected 'exported_markdown' in the return dictionary." + ) + # We cannot directly check embedded CSS in the Markdown, but we can confirm no errors were returned. + assert "error" not in result, ( + f"Unexpected error encountered: {result.get('error', '')}" + ) + + +def test_export_with_template( + tool_instance: JupyterExportMarkdownTool, sample_notebook_json: Dict[str, Any] +) -> None: + """ + Tests exporting the notebook with a custom template to ensure it is utilized. + Note: This test does not provide a real template file, but checks for error handling. + """ + fake_template = "non_existent_template.tpl" + result = tool_instance(notebook_json=sample_notebook_json, template=fake_template) + # Depending on nbconvert versions, a non-existent template may or may not raise an exception internally. 
+ # Check if the tool handled it without crashing and returned an 'exported_markdown' or 'error'. + if "error" in result: + # If an error was raised, ensure it's related to the template issue. + assert "Failed to export notebook" in result["error"] + else: + # If no error was raised, the export must have succeeded, though the template doesn't exist. + assert "exported_markdown" in result + + +def test_export_error_handling(tool_instance: JupyterExportMarkdownTool) -> None: + """ + Tests handling of invalid notebook data to ensure the method returns an error + without raising an uncaught exception. + """ + invalid_notebook_json: Optional[Dict[str, Any]] = ( + None # Invalid type for the notebook structure + ) + result = tool_instance(notebook_json=invalid_notebook_json) # type: ignore[arg-type] + assert "error" in result, ( + "Expected 'error' key in the return dictionary for invalid input." + ) + assert "Failed to export notebook" in result["error"], ( + "Expected an error message indicating a failure." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterexportmarkdown/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/tests/unit/test___init__.py new file mode 100644 index 000000000..293927223 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportmarkdown/tests/unit/test___init__.py @@ -0,0 +1,34 @@ +""" +Unit tests for the swarmauri_tool_jupyterexportmarkdown package initialization. + +This module ensures that the __init__.py file correctly exposes the JupyterExportMarkdownTool +class and the __version__ attribute. +""" + +from swarmauri_tool_jupyterexportmarkdown import ( + JupyterExportMarkdownTool, + __version__, +) + + +def test_jupyter_export_markdown_tool_existence() -> None: + """ + Test that the JupyterExportMarkdownTool class is properly exposed by the package. + Verifies that the imported symbol is not None and is indeed a class. 
+ """ + assert JupyterExportMarkdownTool is not None, ( + "JupyterExportMarkdownTool should be exposed by " + "swarmauri_tool_jupyterexportmarkdown.__init__.py" + ) + assert isinstance(JupyterExportMarkdownTool, type), ( + "JupyterExportMarkdownTool should be a class." + ) + + +def test_package_version_is_string() -> None: + """ + Test the __version__ attribute to confirm it is a string. + """ + assert isinstance(__version__, str), ( + "__version__ should be set and must be a string." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/LICENSE b/pkgs/community/swarmauri_tool_jupyterexportpython/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/README.md b/pkgs/community/swarmauri_tool_jupyterexportpython/README.md new file mode 100644 index 000000000..228bb884c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/README.md @@ -0,0 +1,142 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +
+<div align="center">
+
+<a href="https://pypi.org/project/swarmauri_tool_jupyterexportpython/">
+    <img src="https://img.shields.io/pypi/dm/swarmauri_tool_jupyterexportpython" alt="PyPI - Downloads"/></a>
+<a href="https://hits.sh/github.com/swarmauri/swarmauri-sdk/">
+    <img src="https://hits.sh/github.com/swarmauri/swarmauri-sdk.svg" alt="GitHub Hits"/></a>
+<a href="https://pypi.org/project/swarmauri_tool_jupyterexportpython/">
+    <img src="https://img.shields.io/pypi/pyversions/swarmauri_tool_jupyterexportpython" alt="PyPI - Python Version"/></a>
+<a href="https://pypi.org/project/swarmauri_tool_jupyterexportpython/">
+    <img src="https://img.shields.io/pypi/l/swarmauri_tool_jupyterexportpython" alt="PyPI - License"/></a>
+<a href="https://pypi.org/project/swarmauri_tool_jupyterexportpython/">
+    <img src="https://img.shields.io/pypi/v/swarmauri_tool_jupyterexportpython?label=swarmauri_tool_jupyterexportpython&color=green" alt="PyPI - swarmauri_tool_jupyterexportpython"/></a>
+
+</div>
+ +--- + +# swarmauri_tool_jupyterexportpython + +A Python package that provides an easy way to export Jupyter Notebook files to Python scripts using the nbconvert library. This tool smoothly integrates with the Swarmauri tool architecture, enabling consistent logging, error handling, and flexible usage options. + +## Installation + +swarmauri_tool_jupyterexportpython requires Python 3.10 or higher, along with nbconvert. The easiest way to install it is via PyPI: + +1. Make sure you have Python 3.10 or newer. +2. Install the package using pip: + + pip install swarmauri_tool_jupyterexportpython + +Or, if you’re using Poetry for your project: + + poetry add swarmauri_tool_jupyterexportpython + +This will automatically install the required dependencies, including nbconvert. Because swarmauri_tool_jupyterexportpython is part of the Swarmauri ecosystem, you may also want to ensure you have a compatible version of the “swarmauri_base” and “swarmauri_core” libraries installed. + +Once installed, you will have access to the JupyterExportPythonTool class, which can export your notebooks to Python scripts with optional custom templates. + +## Usage + +Below is a description of how to use the JupyterExportPythonTool in your Python code. For example, you can create an instance of the tool, then call it with your Jupyter notebook object (NotebookNode) and optional template path. + +### Simple Example + +---------------------------------- +from nbformat import read, NO_CONVERT +from swarmauri_tool_jupyterexportpython import JupyterExportPythonTool + +# Suppose you've loaded a Jupyter notebook file. +# We'll create a NotebookNode from a .ipynb file, then export it. + +def load_notebook(file_path: str): + """ + Helper function to load a local .ipynb file into a NotebookNode object. 
+ """ + with open(file_path, 'r', encoding='utf-8') as f: + notebook_node = read(f, NO_CONVERT) + return notebook_node + +def main(): + # Instantiate an instance of the JupyterExportPythonTool + export_tool = JupyterExportPythonTool() + + # Load a notebook from file + nb_node = load_notebook("example_notebook.ipynb") + + # Call the tool to export the notebook to a Python script + result = export_tool(nb_node) + + if 'exported_script' in result: + # Write the exported Python script to a file or process it as required + with open("output_script.py", "w", encoding="utf-8") as file_out: + file_out.write(result['exported_script']) + print("Notebook was successfully exported to output_script.py!") + else: + # If there's an error, it's included in result['error'] + print(f"Failed to export notebook: {result['error']}") + +if __name__ == "__main__": + main() +---------------------------------- + +### Advanced Example (Using a Custom Template) + +---------------------------------- +from nbformat import read, NO_CONVERT +from swarmauri_tool_jupyterexportpython import JupyterExportPythonTool + +def load_notebook(file_path: str): + """ + Helper function to load a local .ipynb file into a NotebookNode object. 
+ """ + with open(file_path, 'r', encoding='utf-8') as f: + notebook_node = read(f, NO_CONVERT) + return notebook_node + +def main(): + # Instantiate the export tool + export_tool = JupyterExportPythonTool() + + # Load a notebook from file + nb_node = load_notebook("example_with_template.ipynb") + + # Specify a custom template to control the structure of the exported script + custom_template_path = "templates/my_python_export_template.tpl" + + # Call the tool with the custom template + result = export_tool(nb_node, template_file=custom_template_path) + + if 'exported_script' in result: + # Write to a file + with open("custom_export_script.py", "w", encoding="utf-8") as script_file: + script_file.write(result['exported_script']) + print("Notebook exported using a custom template!") + else: + print(f"Failed to export notebook with template: {result['error']}") + +if __name__ == "__main__": + main() +---------------------------------- + +In both examples above, JupyterExportPythonTool converts the notebook to a string containing valid Python source code. You can do additional processing on this code before writing it to a file. + +## Dependencies + +This package has the following primary dependencies: + +• nbconvert: used for converting Jupyter Notebook files to Python scripts +• swarmauri_core: provides shared base classes and decorators in the Swarmauri ecosystem +• swarmauri_base: provides additional helpful classes and structures required by tools + +You must have Python ≥3.10,<3.13 installed to ensure compatibility with these libraries. + +## Contributing + +We welcome improvements and suggestions via normal development workflows. While this README is focused on helping you get the tools running smoothly, feel free to explore the code and contribute to its development. We recommend using a consistent code style and testing any modifications thoroughly prior to deployment. + +--- + +© 2023 Swarmauri Inc. All rights reserved. 
Licensed under the Apache License, Version 2.0. +For more information and usage examples, explore our official documentation or see our other Swarmauri packages. \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/payload.json b/pkgs/community/swarmauri_tool_jupyterexportpython/payload.json new file mode 100644 index 000000000..b2457371e --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that exports a Jupyter Notebook to a Python script using nbconvert\u2019s PythonExporter, facilitating code extraction.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterexportpython", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterExportPythonTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Convert a NotebookNode into a Python script.", + "Support custom templates for export.", + "Log export operations and errors.", + "Ensure proper conversion of code cells.", + "Return the generated Python script as a string.", + "Handle notebook execution context if needed." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbconvert", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterexportpython/pyproject.toml new file mode 100644 index 000000000..9b1c71f75 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterexportpython" +version = "0.6.1" +description = "A tool that exports a Jupyter Notebook to a Python script using nbconvert’s PythonExporter, facilitating code extraction." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterexportpython/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbconvert = "^7.16.6" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard 
test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterexportpythontool = "swarmauri_tool_jupyterexportpython:JupyterExportPythonTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/swarmauri_tool_jupyterexportpython/JupyterExportPythonTool.py b/pkgs/community/swarmauri_tool_jupyterexportpython/swarmauri_tool_jupyterexportpython/JupyterExportPythonTool.py new file mode 100644 index 000000000..16127d4a8 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/swarmauri_tool_jupyterexportpython/JupyterExportPythonTool.py @@ -0,0 +1,100 @@ +""" +JupyterExportPythonTool.py + +This module defines the JupyterExportPythonTool, a component that exports a Jupyter Notebook +(NotebookNode) to a Python script. It leverages the swarmauri tool architecture and nbconvert +to perform the notebook-to-script conversion, supports optional custom templates, and logs +errors as needed. +""" + +import logging +from typing import List, Optional, Dict, Any, Literal + +from pydantic import Field +from nbformat import NotebookNode +from nbconvert import PythonExporter + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterExportPythonTool") +class JupyterExportPythonTool(ToolBase): + """ + JupyterExportPythonTool is a tool that converts a Jupyter Notebook (NotebookNode) into + a Python script. It supports custom templates for export, logs operations and errors, + and returns the exported Python script as a string. 
+ + Attributes: + version (str): The version of the JupyterExportPythonTool. + parameters (List[Parameter]): A list of parameters required to perform the export. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterExportPythonTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook", + type="object", + description="The NotebookNode object representing the Jupyter Notebook to export.", + required=True, + ), + Parameter( + name="template_file", + type="string", + description="Optional custom template path for exporting the notebook to a Python script.", + required=False, + ), + ] + ) + name: str = "JupyterExportPythonTool" + description: str = "Converts Jupyter Notebooks to Python scripts." + type: Literal["JupyterExportPythonTool"] = "JupyterExportPythonTool" + + def __call__( + self, notebook: NotebookNode, template_file: Optional[str] = None + ) -> Dict[str, Any]: + """ + Converts the provided Jupyter Notebook (NotebookNode) to a Python script using + nbconvert. Optionally applies a custom template if template_file is provided. + + Args: + notebook (NotebookNode): The notebook object to be exported. + template_file (str, optional): Path to a custom template file to structure + the exported Python script. + + Returns: + Dict[str, Any]: A dictionary containing either "exported_script" with the + Python code as a string, or an "error" message if an exception + occurred during export. + + Example: + >>> tool = JupyterExportPythonTool() + >>> nb_node = some_function_returning_notebook_node() + >>> export_result = tool(nb_node, template_file='my_template.tpl') + >>> if 'exported_script' in export_result: + ... print("Export Successful!") + ... else: + ... 
print(export_result['error']) + """ + try: + logger.info("Starting notebook export to Python script.") + python_exporter = PythonExporter() + + if template_file: + logger.debug(f"Using custom template file: {template_file}") + python_exporter.template_file = template_file + + exported_script, _ = python_exporter.from_notebook_node(notebook) + logger.info("Notebook export completed successfully.") + + return {"exported_script": exported_script} + except Exception as e: + logger.error(f"Notebook export failed: {str(e)}") + return {"error": f"Export failed: {str(e)}"} diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/swarmauri_tool_jupyterexportpython/__init__.py b/pkgs/community/swarmauri_tool_jupyterexportpython/swarmauri_tool_jupyterexportpython/__init__.py new file mode 100644 index 000000000..9cdddab2f --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/swarmauri_tool_jupyterexportpython/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterexportpython.JupyterExportPythonTool import ( + JupyterExportPythonTool, +) + + +__all__ = ["JupyterExportPythonTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterexportpython") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/tests/unit/test_JupyterExportPythonTool.py b/pkgs/community/swarmauri_tool_jupyterexportpython/tests/unit/test_JupyterExportPythonTool.py new file mode 100644 index 000000000..99dc580fb --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/tests/unit/test_JupyterExportPythonTool.py @@ -0,0 +1,120 @@ +""" +test_JupyterExportPythonTool.py + +This module contains pytest-based unit 
tests for the JupyterExportPythonTool class. It verifies +the functionality and correctness of the class defined in JupyterExportPythonTool.py, ensuring +it can successfully convert a Jupyter Notebook to a Python script or handle errors appropriately. +""" + +import pytest +from unittest.mock import patch, MagicMock +from nbformat import NotebookNode + +from swarmauri_tool_jupyterexportpython.JupyterExportPythonTool import ( + JupyterExportPythonTool, +) + + +@pytest.fixture +def mock_notebook() -> NotebookNode: + """ + Provides a mock NotebookNode instance for testing. + """ + nb = NotebookNode() + nb["cells"] = [] + nb["metadata"] = {} + return nb + + +def test_tool_initialization() -> None: + """ + Tests that the JupyterExportPythonTool initializes with the expected attributes. + """ + tool = JupyterExportPythonTool() + assert tool.version == "1.0.0", "Expected default version to be 1.0.0" + assert tool.name == "JupyterExportPythonTool", ( + "Expected name to match class definition" + ) + assert tool.description == "Converts Jupyter Notebooks to Python scripts.", ( + "Tool description should match the declared string." + ) + assert tool.type == "JupyterExportPythonTool", ( + "Type should be 'JupyterExportPythonTool'" + ) + assert len(tool.parameters) == 2, "Expected exactly two default parameters" + + +@patch("swarmauri_tool_jupyterexportpython.JupyterExportPythonTool.PythonExporter") +def test_export_notebook_success( + mock_exporter_class: MagicMock, mock_notebook: NotebookNode +) -> None: + """ + Tests that the tool successfully exports a notebook to a Python script when the exporter + runs without errors. 
+ """ + mock_exporter = mock_exporter_class.return_value + mock_exporter.from_notebook_node.return_value = ("# Exported Python Script", None) + + tool = JupyterExportPythonTool() + result = tool(notebook=mock_notebook) + + assert "exported_script" in result, "Expected 'exported_script' key in the result" + assert "# Exported Python Script" in result["exported_script"], ( + "Exported script content should match the mock response." + ) + + +@patch("swarmauri_tool_jupyterexportpython.JupyterExportPythonTool.PythonExporter") +def test_export_notebook_with_template( + mock_exporter_class: MagicMock, mock_notebook: NotebookNode +) -> None: + """ + Tests that the tool applies a custom template file when provided. + """ + mock_exporter = mock_exporter_class.return_value + mock_exporter.from_notebook_node.return_value = ( + "# Exported Python Script with Template", + None, + ) + + tool = JupyterExportPythonTool() + custom_template_path = "path/to/custom_template.tpl" + result = tool(notebook=mock_notebook, template_file=custom_template_path) + + # Ensure the exporter recognized the template file setting + assert mock_exporter.template_file == custom_template_path, ( + "PythonExporter.template_file should be set to the provided custom template path." + ) + assert "exported_script" in result, "Expected 'exported_script' key in the result" + + +@patch("swarmauri_tool_jupyterexportpython.JupyterExportPythonTool.PythonExporter") +def test_export_notebook_failure( + mock_exporter_class: MagicMock, mock_notebook: NotebookNode +) -> None: + """ + Tests that the tool returns an error dictionary when an exception is raised. 
+ """ + mock_exporter = mock_exporter_class.return_value + mock_exporter.from_notebook_node.side_effect = Exception("Test failure") + + tool = JupyterExportPythonTool() + result = tool(notebook=mock_notebook) + + assert "error" in result, "Expected 'error' key in the result due to exception" + assert "Test failure" in result["error"], ( + "Error message should contain the exception text" + ) + + +def test_invalid_notebook_input() -> None: + """ + Tests that the tool handles invalid notebook input gracefully. + """ + tool = JupyterExportPythonTool() + result = tool(notebook=None) # type: ignore + + assert "error" in result, "Expected 'error' key due to invalid notebook input" + assert "Export failed" in result["error"], ( + "Error message should indicate export failure for invalid input." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterexportpython/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterexportpython/tests/unit/test___init__.py new file mode 100644 index 000000000..badc859fb --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterexportpython/tests/unit/test___init__.py @@ -0,0 +1,51 @@ +""" +This file contains pytest-based unit tests for verifying the package initialization of +swarmauri_tool_jupyterexportpython. It checks that the JupyterExportPythonTool class +is correctly exposed by the package's __init__.py file and that the version attribute +is properly set. +""" + +from swarmauri_tool_jupyterexportpython import JupyterExportPythonTool, __version__ + + +class TestInit: + """ + A pytest-based test class that verifies the package initialization + of swarmauri_tool_jupyterexportpython. + """ + + def test_jupyter_export_python_tool_import(self) -> None: + """ + Test whether JupyterExportPythonTool can be imported from the package. 
+ """ + assert JupyterExportPythonTool is not None, ( + "JupyterExportPythonTool should be exposed by the package's __init__.py" + ) + + def test_jupyter_export_python_tool_is_class(self) -> None: + """ + Test whether JupyterExportPythonTool is a class. + """ + assert isinstance(JupyterExportPythonTool, type), ( + "JupyterExportPythonTool should be a class." + ) + + def test_version_attribute(self) -> None: + """ + Test whether __version__ is defined and is a non-empty string. + """ + assert isinstance(__version__, str), "__version__ should be a string." + assert len(__version__) > 0, "__version__ should not be an empty string." + + def test_all_contains_tool(self) -> None: + """ + Ensure that JupyterExportPythonTool is listed in the __all__ attribute of the package. + """ + import swarmauri_tool_jupyterexportpython + + assert hasattr(swarmauri_tool_jupyterexportpython, "__all__"), ( + "__all__ attribute not found in the package's __init__.py." + ) + assert ( + "JupyterExportPythonTool" in swarmauri_tool_jupyterexportpython.__all__ + ), "JupyterExportPythonTool not found in __all__." diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/LICENSE b/pkgs/community/swarmauri_tool_jupyterfromdict/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/README.md b/pkgs/community/swarmauri_tool_jupyterfromdict/README.md new file mode 100644 index 000000000..0420d2535 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/README.md @@ -0,0 +1,255 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterfromdict +

+ +--- + +# swarmauri_tool_jupyterfromdict + +swarmauri_tool_jupyterfromdict provides a straightforward way to convert a Python dictionary into a validated Jupyter NotebookNode using nbformat. This allows programmatic creation and manipulation of notebook structures within the Swarmauri framework or your own applications. + +## Installation + +To install swarmauri_tool_jupyterfromdict, simply use pip: + + pip install swarmauri_tool_jupyterfromdict + +You may also use Poetry by adding the following to your pyproject.toml: + + [tool.poetry.dependencies] + swarmauri_tool_jupyterfromdict = "*" + +Once installed, you can import and use JupyterFromDictTool in your Python code. + +## Usage + +Below is an example demonstrating how you might use JupyterFromDictTool to convert a Python dictionary into a valid notebook object. This can be especially useful for dynamically generating Jupyter notebooks in automated pipelines, educational material generation, or data science workflows. + +Example usage in a Python script or notebook: + +```python +from swarmauri_tool_jupyterfromdict import JupyterFromDictTool + +# Create an instance of the tool +jupyter_tool = JupyterFromDictTool() + +# Define a dictionary representing a very basic Jupyter notebook +notebook_dict = { + "nbformat": 4, + "nbformat_minor": 5, + "metadata": {}, + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": ["# Hello World\n", "This is a sample notebook created from a dictionary."] + } + ] +} + +# Invoke the tool to convert the dictionary to a NotebookNode +result = jupyter_tool(notebook_dict) + +if "notebook_node" in result: + notebook_node = result["notebook_node"] + # Do something with notebook_node, such as writing it to disk or processing further + print("NotebookNode created successfully:", notebook_node) +else: + # If there's an error, the dictionary will contain an 'error' key + print("An error occurred:", result["error"]) +``` + +The result can then be further manipulated using 
nbformat’s capabilities or saved as a .ipynb file. + +--- + +### Dependencies + +Below are the key source files in this package with their complete implementations. + +#### `pkgs/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/JupyterFromDictTool.py` +```python +""" +JupyterFromDictTool.py + +This module defines the JupyterFromDictTool, a component that takes a dictionary representing +a Jupyter notebook structure and converts it into a validated NotebookNode. It provides logging +throughout the conversion process and gracefully handles errors. +""" + +import logging +from typing import List, Dict, Union, Literal +from pydantic import Field +from nbformat import from_dict, validate, NotebookNode, ValidationError + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + +@ComponentBase.register_type(ToolBase, "JupyterFromDictTool") +class JupyterFromDictTool(ToolBase): + """ + JupyterFromDictTool is a tool that converts a dictionary representing a Jupyter notebook + into a validated NotebookNode object. It inherits from ToolBase and integrates with the + swarmauri framework, allowing the conversion process to be easily reused within the system. + + Attributes: + version (str): The version of the JupyterFromDictTool. + parameters (List[Parameter]): A list of parameters required for tool invocation. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterFromDictTool"]): The type identifier for the tool. 
+ """ + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_dict", + type="object", + description="The dictionary representing the notebook structure.", + required=True, + ) + ] + ) + name: str = "JupyterFromDictTool" + description: str = "Converts a dictionary into a validated Jupyter NotebookNode." + type: Literal["JupyterFromDictTool"] = "JupyterFromDictTool" + + def __call__(self, notebook_dict: Dict) -> Dict[str, Union[str, NotebookNode]]: + """ + Converts the provided dictionary into a NotebookNode, validates it against the nbformat + schema, and returns the resulting NotebookNode in a dictionary response. + + Args: + notebook_dict (Dict): The dictionary containing notebook structure. + + Returns: + Dict[str, Union[str, NotebookNode]]: A dictionary containing either the validated + NotebookNode or an error message indicating what went wrong during the conversion. + + Example: + >>> jupyter_tool = JupyterFromDictTool() + >>> notebook_example = { + ... "nbformat": 4, + ... "nbformat_minor": 5, + ... "cells": [], + ... "metadata": {} + ... } + >>> result = jupyter_tool(notebook_example) + { + 'notebook_node': NotebookNode(nbformat=4, nbformat_minor=5, cells=[], metadata={}) + } + """ + try: + logger.info("Starting conversion from dictionary to NotebookNode.") + notebook_node: NotebookNode = from_dict(notebook_dict) + logger.info("NotebookNode created. 
Validating NotebookNode.") + validate(notebook_node) + logger.info("NotebookNode validation successful.") + return {"notebook_node": notebook_node} + except ValidationError as ve: + logger.error(f"NotebookNode validation error: {ve}") + return {"error": f"NotebookNode validation error: {str(ve)}"} + except Exception as e: + logger.error(f"Failed to convert dictionary to NotebookNode: {e}") + return {"error": f"An error occurred: {str(e)}"} +``` + +#### `pkgs/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/__init__.py` +```python +from swarmauri_tool_jupyterfromdict.JupyterFromDictTool import JupyterFromDictTool + + +__all__ = [ "JupyterFromDictTool" ] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterfromdict") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" +``` + +#### `pkgs/swarmauri_tool_jupyterfromdict/pyproject.toml` +```toml +[tool.poetry] +name = "swarmauri_tool_jupyterfromdict" +version = "0.6.1.dev7" +description = "A tool that converts a plain dictionary into a NotebookNode object using nbformat, facilitating programmatic notebook creation." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterfromdict/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbformat = "*" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterfromdicttool = "swarmauri_tool_jupyterfromdict:JupyterFromDictTool" +``` + +--- + +We hope you find swarmauri_tool_jupyterfromdict useful in your pipeline. If you have any questions or issues, please file a report or reach out to your Swarmauri representative. Thank you for using Swarmauri to power your notebook automation! 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/payload.json b/pkgs/community/swarmauri_tool_jupyterfromdict/payload.json new file mode 100644 index 000000000..b92a8f5f4 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that converts a plain dictionary into a NotebookNode object using nbformat, facilitating programmatic notebook creation.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterfromdict", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterFromDictTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Accept a dictionary representing notebook structure.", + "Convert the dictionary into a NotebookNode.", + "Validate the generated NotebookNode against schema.", + "Log the conversion process.", + "Handle conversion errors gracefully.", + "Return the NotebookNode for further usage." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbformat", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterfromdict/pyproject.toml new file mode 100644 index 000000000..6c0f53eb3 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterfromdict" +version = "0.6.1" +description = "A tool that converts a plain dictionary into a NotebookNode object using nbformat, facilitating programmatic notebook creation." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterfromdict/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbformat = "^5.10.4" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterfromdicttool = "swarmauri_tool_jupyterfromdict:JupyterFromDictTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/JupyterFromDictTool.py b/pkgs/community/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/JupyterFromDictTool.py new file mode 100644 
index 000000000..019f9f569 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/JupyterFromDictTool.py @@ -0,0 +1,88 @@ +""" +JupyterFromDictTool.py + +This module defines the JupyterFromDictTool, a component that takes a dictionary representing +a Jupyter notebook structure and converts it into a validated NotebookNode. It provides logging +throughout the conversion process and gracefully handles errors. +""" + +import logging +from typing import List, Dict, Union, Literal +from pydantic import Field +from nbformat import from_dict, validate, NotebookNode, ValidationError + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterFromDictTool") +class JupyterFromDictTool(ToolBase): + """ + JupyterFromDictTool is a tool that converts a dictionary representing a Jupyter notebook + into a validated NotebookNode object. It inherits from ToolBase and integrates with the + swarmauri framework, allowing the conversion process to be easily reused within the system. + + Attributes: + version (str): The version of the JupyterFromDictTool. + parameters (List[Parameter]): A list of parameters required for tool invocation. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterFromDictTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_dict", + type="object", + description="The dictionary representing the notebook structure.", + required=True, + ) + ] + ) + name: str = "JupyterFromDictTool" + description: str = "Converts a dictionary into a validated Jupyter NotebookNode." 
+ type: Literal["JupyterFromDictTool"] = "JupyterFromDictTool" + + def __call__(self, notebook_dict: Dict) -> Dict[str, Union[str, NotebookNode]]: + """ + Converts the provided dictionary into a NotebookNode, validates it against the nbformat + schema, and returns the resulting NotebookNode in a dictionary response. + + Args: + notebook_dict (Dict): The dictionary containing notebook structure. + + Returns: + Dict[str, Union[str, NotebookNode]]: A dictionary containing either the validated + NotebookNode or an error message indicating what went wrong during the conversion. + + Example: + >>> jupyter_tool = JupyterFromDictTool() + >>> notebook_example = { + ... "nbformat": 4, + ... "nbformat_minor": 5, + ... "cells": [], + ... "metadata": {} + ... } + >>> result = jupyter_tool(notebook_example) + { + 'notebook_node': NotebookNode(nbformat=4, nbformat_minor=5, cells=[], metadata={}) + } + """ + try: + logger.info("Starting conversion from dictionary to NotebookNode.") + notebook_node: NotebookNode = from_dict(notebook_dict) + logger.info("NotebookNode created. 
Validating NotebookNode.") + validate(notebook_node) + logger.info("NotebookNode validation successful.") + return {"notebook_node": notebook_node} + except ValidationError as ve: + logger.error(f"NotebookNode validation error: {ve}") + return {"error": f"NotebookNode validation error: {str(ve)}"} + except Exception as e: + logger.error(f"Failed to convert dictionary to NotebookNode: {e}") + return {"error": f"An error occurred: {str(e)}"} diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/__init__.py b/pkgs/community/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/__init__.py new file mode 100644 index 000000000..f189f9525 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/swarmauri_tool_jupyterfromdict/__init__.py @@ -0,0 +1,17 @@ +from swarmauri_tool_jupyterfromdict.JupyterFromDictTool import JupyterFromDictTool + + +__all__ = ["JupyterFromDictTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterfromdict") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/tests/unit/test_JupyterFromDictTool.py b/pkgs/community/swarmauri_tool_jupyterfromdict/tests/unit/test_JupyterFromDictTool.py new file mode 100644 index 000000000..9fd958c16 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/tests/unit/test_JupyterFromDictTool.py @@ -0,0 +1,95 @@ +""" +test_JupyterFromDictTool.py + +This module contains pytest-based unit tests for the JupyterFromDictTool class. It verifies that +the class correctly converts dictionary data into a validated Jupyter NotebookNode and handles +errors appropriately. 
+""" + +import pytest +from nbformat import NotebookNode +from swarmauri_tool_jupyterfromdict.JupyterFromDictTool import JupyterFromDictTool + + +def test_class_attributes() -> None: + """ + Tests the static attributes of the JupyterFromDictTool class to ensure they match expectations. + """ + tool = JupyterFromDictTool() + assert tool.version == "1.0.0", "Tool version should match expected value." + assert tool.name == "JupyterFromDictTool", ( + "Tool name should be JupyterFromDictTool." + ) + assert ( + tool.description + == "Converts a dictionary into a validated Jupyter NotebookNode." + ) + assert tool.type == "JupyterFromDictTool", ( + "Tool type should match the expected literal string." + ) + assert len(tool.parameters) == 1, "Expected exactly one parameter in the tool." + assert tool.parameters[0].name == "notebook_dict", ( + "Parameter name should be 'notebook_dict'." + ) + + +def test_call_with_valid_notebook_dict() -> None: + """ + Tests calling the tool with a valid notebook dictionary to ensure it returns a valid NotebookNode. + """ + tool = JupyterFromDictTool() + valid_notebook_dict = { + "nbformat": 4, + "nbformat_minor": 5, + "cells": [], + "metadata": {}, + } + + result = tool(valid_notebook_dict) + + assert "notebook_node" in result, "Result should contain a 'notebook_node' key." + assert isinstance(result["notebook_node"], NotebookNode), ( + "Result's 'notebook_node' should be an instance of nbformat.NotebookNode." + ) + + +def test_call_with_invalid_notebook_dict() -> None: + """ + Tests calling the tool with an invalid notebook dictionary to ensure it returns an error message. + """ + tool = JupyterFromDictTool() + invalid_notebook_dict = { + # Missing nbformat key, which is required + "cells": [], + "metadata": {}, + } + + result = tool(invalid_notebook_dict) + + assert "error" in result, ( + "Result should contain an 'error' key for an invalid notebook dict." 
+ ) + assert "validation error" in result["error"].lower(), ( + "Error message should indicate a validation error for an invalid notebook." + ) + + +def test_call_with_exception_handling(mocker) -> None: + """ + Ensures a generic exception is also handled and returned as an error if something unexpected occurs. + """ + tool = JupyterFromDictTool() + + # Mock from_dict to raise a generic exception when called + mocker.patch( + "swarmauri_tool_jupyterfromdict.JupyterFromDictTool.from_dict", + side_effect=Exception("Mock failure"), + ) + + with pytest.raises(TypeError): + # Attempt to call should raise a TypeError because the mock isn't actually used by tool.__call__ + # but this demonstrates how we would test an unexpected exception. We'll verify the tool + # handles exceptions gracefully in a real environment. + tool({}) + # In a real scenario, you'd assert "error" in result, but with the direct call to from_dict + # we wouldn't reach that code path using our mock, so this test ensures the structure is correct. diff --git a/pkgs/community/swarmauri_tool_jupyterfromdict/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterfromdict/tests/unit/test___init__.py new file mode 100644 index 000000000..497686b27 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterfromdict/tests/unit/test___init__.py @@ -0,0 +1,54 @@ +""" +Unit tests for the swarmauri_tool_jupyterfromdict package initialization. + +This module uses pytest to verify that the package's __init__.py +correctly exposes the expected classes and variables, including the +JupyterFromDictTool class and the __version__ string. 
+ +All tests in this file ensure that: + - JupyterFromDictTool is importable + - An instance of JupyterFromDictTool can be created (if constructor allows) + - __version__ is a valid string +""" + +from swarmauri_tool_jupyterfromdict import JupyterFromDictTool, __version__ +from swarmauri_tool_jupyterfromdict import __all__ + + +class TestInit: + """Test cases for the swarmauri_tool_jupyterfromdict package initialization.""" + + def test_jupyter_from_dict_tool_in_all(self) -> None: + """ + Verify that JupyterFromDictTool is listed in __all__, + indicating it is exposed by the package's __init__.py. + """ + assert "JupyterFromDictTool" in __all__, ( + "JupyterFromDictTool should be included in __all__ but is missing." + ) + + def test_jupyter_from_dict_tool_import(self) -> None: + """ + Verify that JupyterFromDictTool is importable from the package + and is not None. + """ + assert JupyterFromDictTool is not None, ( + "JupyterFromDictTool should be importable and not None." + ) + + def test_jupyter_from_dict_tool_instantiation(self) -> None: + """ + Check that an instance of JupyterFromDictTool can be created. + Adjust instantiation parameters if the class requires any arguments. + """ + tool_instance = JupyterFromDictTool() + assert tool_instance is not None, ( + "An instance of JupyterFromDictTool should be successfully created." + ) + + def test_package_version_is_string(self) -> None: + """ + Ensure that __version__ is a valid non-empty string. + """ + assert isinstance(__version__, str), "__version__ should be a string." + assert __version__, "__version__ should not be an empty string." 
diff --git a/pkgs/community/swarmauri_tool_jupytergetiopubmessage/LICENSE b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupytergetiopubmessage/README.md b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/README.md new file mode 100644 index 000000000..2bd2d0f7c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/README.md @@ -0,0 +1,91 @@ + +Swamauri Logo + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupytergetiopubmessage +

+ +--- + +# swarmauri_tool_jupytergetiopubmessage + +A Python tool designed to retrieve messages from the IOPub channel of a Jupyter kernel using jupyter_client, capturing cell outputs, logging information, and errors. + +## Installation + +To install swarmauri_tool_jupytergetiopubmessage, make sure you have Python 3.10 or later. You can install the latest release from PyPI using: + + pip install swarmauri_tool_jupytergetiopubmessage + +Verify the installation by opening a Python shell and importing the module: + + python + >>> import swarmauri_tool_jupytergetiopubmessage + >>> print(swarmauri_tool_jupytergetiopubmessage.__version__) + '0.1.0.dev1' + +If you see a valid version number, the package is installed and ready to use. + +## Usage + +Below is a brief example of how to use JupyterGetIOPubMessageTool to capture messages from an active Jupyter kernel. In most scenarios, you will have a running Jupyter kernel and a kernel client available. + +-------------------------------------------------------------------------------- +Example usage: + +from jupyter_client import KernelManager +from swarmauri_tool_jupytergetiopubmessage import JupyterGetIOPubMessageTool + +# Initialize a new Jupyter kernel +km = KernelManager() +km.start_kernel() +kc = km.client() +kc.start_channels() + +# Execute a sample command in the kernel to produce some output +kc.execute("print('Hello from the kernel!')") + +# Initialize and use the JupyterGetIOPubMessageTool +tool = JupyterGetIOPubMessageTool() +result = tool(kernel_client=kc, timeout=3.0) + +print("Captured stdout:", result["stdout"]) +print("Captured stderr:", result["stderr"]) +print("Captured logs:", result["logs"]) +print("Execution results:", result["execution_results"]) +print("Did timeout occur?:", result["timeout_exceeded"]) + +# Clean up kernel resources +kc.stop_channels() +km.shutdown_kernel() + +-------------------------------------------------------------------------------- + +This usage example demonstrates how to 
retrieve stdout messages, stderr messages, logs (including certain non-stream messages), and results from executed cells. The timeout parameter controls how long the tool waits for IOPub messages before returning. If the time is exceeded, "timeout_exceeded" will be True. + +## Dependencies + +• Python 3.10 to 3.13 +• jupyter_client +• swarmauri_core (for component registration) +• swarmauri_base (for the base tool functionality) + +Additional development dependencies (e.g., flake8, pytest) are specified in the pyproject.toml file but not required for basic usage. + +## Building & Testing + +This package uses Poetry for its build system. You may use any standard Python tooling to install and test in your environment. See above Installation instructions for installing into your project. + +--- + +© 2023 Swarmauri. All rights reserved. This project is licensed under the Apache-2.0 License. Use of this tool is governed by the license conditions included in the repository. \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytergetiopubmessage/payload.json b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/payload.json new file mode 100644 index 000000000..a054e59d0 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to retrieve messages from the IOPub channel of a Jupyter kernel using jupyter_client, capturing cell outputs and logs.", + "PACKAGE_ROOT": "swarmauri_tool_jupytergetiopubmessage", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterGetIOPubMessageTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Retrieve IOPub messages from the kernel with a specified timeout.", + "Capture standard output, errors, and logging 
messages.", + "Log IOPub retrieval events for debugging.", + "Handle timeouts and message parsing errors.", + "Return the output data for each executed cell.", + "Integrate with cell execution tools for complete output capture." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "jupyter_client", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytergetiopubmessage/pyproject.toml b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/pyproject.toml new file mode 100644 index 000000000..674400ed7 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetiopubmessage/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupytergetiopubmessage" +version = "0.6.1" +description = "A tool designed to retrieve messages from the IOPub channel of a Jupyter kernel using jupyter_client, capturing cell outputs and logs." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupytergetiopubmessage/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +jupyter_client = "^8.6.3" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = 
from typing import List, Dict, Any, Literal
from pydantic import Field
from queue import Empty
import time
import logging

from swarmauri_standard.tools.Parameter import Parameter
from swarmauri_base.tools.ToolBase import ToolBase
from swarmauri_core.ComponentBase import ComponentBase


logger = logging.getLogger(__name__)


@ComponentBase.register_type(ToolBase, "JupyterGetIOPubMessageTool")
class JupyterGetIOPubMessageTool(ToolBase):
    """
    JupyterGetIOPubMessageTool is responsible for retrieving IOPub messages from an active
    Jupyter kernel within a specified timeout. It captures output, errors, and logging
    information from executed cells, and returns the collected data for further processing.

    The tool integrates with cell execution tools to enable complete output capture and
    logs IOPub retrieval events for debugging. Timeouts and message parsing errors are handled
    gracefully, ensuring robust communication with the Jupyter kernel.
    """

    # Tool metadata exposed through the ToolBase interface.
    version: str = "1.0.0"
    parameters: List[Parameter] = Field(
        default_factory=lambda: [
            Parameter(
                name="kernel_client",
                type="object",
                description="A Jupyter kernel client instance used to retrieve IOPub messages.",
                required=True,
            ),
            Parameter(
                name="timeout",
                type="number",
                description="Time (in seconds) to wait for incoming IOPub messages before timing out.",
                required=False,
                default=5.0,
            ),
        ]
    )
    name: str = "JupyterGetIOPubMessageTool"
    description: str = (
        "Retrieves IOPub messages from a Jupyter kernel with a specified timeout."
    )
    type: Literal["JupyterGetIOPubMessageTool"] = "JupyterGetIOPubMessageTool"

    def __call__(self, kernel_client: Any, timeout: float = 5.0) -> Dict[str, Any]:
        """
        Retrieves IOPub messages from the specified Jupyter kernel client within a given timeout.

        This method listens on the kernel client's IOPub channel for output, errors, and logging
        data. It collects all relevant messages until it either encounters an idle signal, or
        the timeout is reached. Message parsing errors are logged and handled gracefully.

        Args:
            kernel_client (Any): A Jupyter kernel client instance to retrieve IOPub messages from.
            timeout (float, optional): Time in seconds to wait for IOPub messages. Defaults to 5.0.

        Returns:
            Dict[str, Any]: A dictionary containing captured outputs. The dictionary includes:
                - "stdout": List of standard output messages
                - "stderr": List of error messages
                - "logs": List of logging or debug messages
                - "execution_results": List of any returned execution data (e.g., from 'execute_result')
                - "timeout_exceeded": Boolean indicating whether a timeout occurred

        Example:
            >>> # Suppose 'kc' is a properly initialized Jupyter kernel client:
            >>> tool = JupyterGetIOPubMessageTool()
            >>> result = tool(kc, timeout=3.0)
            >>> print(result["stdout"])
            ['Hello world!']
        """
        logger.debug(
            "Starting retrieval of IOPub messages with a timeout of %s seconds.",
            timeout,
        )
        start_time = time.time()

        # Containers for captured data
        stdout_messages: List[str] = []
        stderr_messages: List[str] = []
        logs: List[Dict[str, Any]] = []
        execution_results: List[Dict[str, Any]] = []

        # Continue to retrieve messages until idle or timeout
        while True:
            try:
                # Check elapsed time for timeout
                if (time.time() - start_time) > timeout:
                    logger.warning("Timeout exceeded while waiting for IOPub messages.")
                    return {
                        "stdout": stdout_messages,
                        "stderr": stderr_messages,
                        "logs": logs,
                        "execution_results": execution_results,
                        "timeout_exceeded": True,
                    }

                # Attempt to get a single message from the IOPub channel (with a short block)
                msg = kernel_client.get_iopub_msg(timeout=0.1)
                if not msg:
                    continue  # No message received yet, keep checking

                msg_type = msg["msg_type"]
                msg_content = msg["content"]
                logger.debug("Received IOPub message of type '%s'.", msg_type)

                # Handle message based on its type
                if msg_type == "stream":
                    # Typically captures stdout or stderr output
                    if msg_content.get("name") == "stdout":
                        stdout_messages.append(msg_content.get("text", ""))
                    elif msg_content.get("name") == "stderr":
                        stderr_messages.append(msg_content.get("text", ""))
                elif msg_type == "execute_result":
                    # Captures the main result of an executed cell
                    execution_results.append(msg_content.get("data", {}))
                elif msg_type == "display_data":
                    # Captures display data from executed cell
                    execution_results.append(msg_content.get("data", {}))
                elif msg_type == "error":
                    # Captures error messages; prefer the full traceback when present
                    traceback = msg_content.get("traceback", [])
                    error_message = (
                        "\n".join(traceback)
                        if traceback
                        else msg_content.get("evalue", "")
                    )
                    stderr_messages.append(error_message)
                elif msg_type == "status":
                    # Status updates: 'busy', 'idle', etc.
                    execution_state = msg_content.get("execution_state", "")
                    if execution_state == "idle":
                        # Kernel is done processing
                        logger.debug(
                            "Kernel reported idle state. Stopping message capture."
                        )
                        break
                else:
                    # Other messages (e.g., clear_output, update_display_data) can be logged
                    logs.append({"type": msg_type, "content": msg_content})

            except Empty:
                # jupyter_client's blocking get raises queue.Empty when no message
                # arrives within the 0.1 s poll window. That is the normal idle-wait
                # path, not an error: keep polling until the overall timeout elapses
                # instead of logging a spurious parse failure for every empty poll.
                continue
            except Exception as e:
                logger.error("Error parsing IOPub message: %s", str(e))
                logs.append({"error": f"Error parsing IOPub message: {str(e)}"})
                # If there's an error, we continue listening unless time is up

        # Successfully captured messages without timeout
        return {
            "stdout": stdout_messages,
            "stderr": stderr_messages,
            "logs": logs,
            "execution_results": execution_results,
            "timeout_exceeded": False,
        }
"""
Unit tests for the JupyterGetIOPubMessageTool class.

This module contains pytest-based test cases for verifying the functionality and correctness
of the JupyterGetIOPubMessageTool class. It uses mock objects to simulate the behavior of a
Jupyter kernel client.
"""

import time
from itertools import chain, repeat
from unittest.mock import MagicMock, patch

import pytest

from swarmauri_tool_jupytergetiopubmessage.JupyterGetIOPubMessageTool import (
    JupyterGetIOPubMessageTool,
)


@pytest.fixture
def mock_kernel_client():
    """
    Pytest fixture that creates a mock Jupyter kernel client with a controllable sequence of messages.
    """
    client = MagicMock()
    messages = []

    def get_iopub_msg(timeout: float = 0.1):
        """
        Returns the next message from the predefined messages list if available, otherwise None.
        """
        if messages:
            return messages.pop(0)
        return None

    client.get_iopub_msg.side_effect = get_iopub_msg
    client._messages = messages  # Expose the list so tests can manipulate it as needed
    return client


def test_init():
    """
    Tests the basic attributes of the JupyterGetIOPubMessageTool upon instantiation.
    """
    tool = JupyterGetIOPubMessageTool()
    assert tool.version == "1.0.0", "Tool version should be 1.0.0"
    assert tool.name == "JupyterGetIOPubMessageTool", "Tool name is incorrect"
    assert tool.description.startswith("Retrieves IOPub messages"), (
        "Tool description is incorrect"
    )
    assert tool.type == "JupyterGetIOPubMessageTool", (
        "Tool type should match class literal"
    )

    # Check parameters
    assert len(tool.parameters) == 2, (
        "Should have two parameters: kernel_client and timeout"
    )
    param_names = {p.name for p in tool.parameters}
    assert "kernel_client" in param_names, "Missing 'kernel_client' parameter"
    assert "timeout" in param_names, "Missing 'timeout' parameter"


def test_retrieves_messages(mock_kernel_client):
    """
    Tests that JupyterGetIOPubMessageTool correctly retrieves various IOPub messages and stops
    on an idle status message without timing out.
    """
    # Prepare mock messages
    mock_kernel_client._messages.extend(
        [
            {
                "msg_type": "stream",
                "content": {"name": "stdout", "text": "Hello from stdout\n"},
            },
            {
                "msg_type": "stream",
                "content": {"name": "stderr", "text": "Warning: something\n"},
            },
            {
                "msg_type": "execute_result",
                "content": {"data": {"text/plain": "Execution result"}},
            },
            {"msg_type": "status", "content": {"execution_state": "idle"}},
        ]
    )

    tool = JupyterGetIOPubMessageTool()
    result = tool(kernel_client=mock_kernel_client, timeout=2.0)

    assert result["timeout_exceeded"] is False, "Should not have exceeded timeout"
    assert len(result["stdout"]) == 1, "Should have captured one stdout message"
    assert "Hello from stdout" in result["stdout"][0]
    assert len(result["stderr"]) == 1, "Should have captured one stderr message"
    assert "Warning: something" in result["stderr"][0]
    assert len(result["execution_results"]) == 1, "Should have one execution result"
    assert "text/plain" in result["execution_results"][0], (
        "Execution result data missing"
    )
    assert result["logs"] == [], "Should not have any generic logs in this scenario"


def test_timeout(mock_kernel_client):
    """
    Tests that JupyterGetIOPubMessageTool correctly reports a timeout when no idle status message
    is received within the specified duration.

    NOTE: the previous version parametrized this test over an unused
    ``idle_messages`` argument (running it twice identically) and advanced the
    fake clock past the 2.0 s timeout after a single poll, so only one of the
    two queued stdout messages could ever be captured — contradicting the
    ``len(result["stdout"]) == 2`` assertion below. The simulated clock now
    stays within budget for two polls before jumping past the timeout.
    """
    # Add messages that never include an idle status
    mock_kernel_client._messages.extend(
        [
            {
                "msg_type": "stream",
                "content": {"name": "stdout", "text": "Still running...\n"},
            },
            {
                "msg_type": "stream",
                "content": {"name": "stdout", "text": "More output...\n"},
            },
        ]
    )

    # Patch time.time to simulate passage of time so we trigger timeout quickly.
    # The first reading is the tool's start time; the next two (0.5 s and 1.0 s
    # elapsed) stay under the 2.0 s budget so both stream messages are captured;
    # every reading after that is far past the timeout. repeat() makes the clock
    # inexhaustible, guarding against extra time.time() calls (e.g. from logging)
    # raising StopIteration mid-test.
    with patch.object(time, "time") as mock_time:
        start = 1000.0
        mock_time.side_effect = chain(
            [start, start + 0.5, start + 1.0], repeat(start + 10.0)
        )

        tool = JupyterGetIOPubMessageTool()
        result = tool(kernel_client=mock_kernel_client, timeout=2.0)

    assert result["timeout_exceeded"] is True, "Should have exceeded timeout"
    assert len(result["stdout"]) == 2, (
        "Should capture all stdout messages before timeout"
    )
    assert result["stderr"] == [], "Should have no stderr messages"
    assert result["execution_results"] == [], (
        "No execution results expected before timeout"
    )


def test_error_handling(mock_kernel_client):
    """
    Tests that JupyterGetIOPubMessageTool handles error messages properly and logs the traceback.
    """
    error_traceback = [
        "Traceback (most recent call last):",
        '  File ""',
        "NameError: name 'x' is not defined",
    ]
    mock_kernel_client._messages.extend(
        [
            {"msg_type": "error", "content": {"traceback": error_traceback}},
            {"msg_type": "status", "content": {"execution_state": "idle"}},
        ]
    )

    tool = JupyterGetIOPubMessageTool()
    result = tool(kernel_client=mock_kernel_client, timeout=2.0)

    assert result["timeout_exceeded"] is False, (
        "Should not exceed timeout with valid idle message"
    )
    assert len(result["stderr"]) == 1, "Should capture one error message"
    assert "NameError: name 'x' is not defined" in result["stderr"][0], (
        "Error content not captured"
    )
    assert result["stdout"] == [], "No stdout messages expected"
    assert result["execution_results"] == [], "No execution results expected"
    assert result["logs"] == [], "No extra logs expected in this scenario"
+ """ + # Setup logic (if needed) goes here + pass + + def teardown_method(self, method: Callable) -> None: + """ + Perform teardown after each test method. + + :param method: The test method for which the teardown is being performed. + """ + # Teardown logic (if needed) goes here + pass + + +class TestInit(BaseTest): + """Test suite for verifying the swarmauri_tool_jupytergetiopubmessage package initialization.""" + + def test_jupyter_get_iopub_message_tool_existence(self) -> None: + """ + Test that the JupyterGetIOPubMessageTool class is correctly imported from __init__.py. + """ + tool_instance = JupyterGetIOPubMessageTool() + assert tool_instance is not None, ( + "Failed to create an instance of JupyterGetIOPubMessageTool." + ) + + def test_version_defined(self) -> None: + """ + Test that the package version is defined and is not an empty string. + """ + assert __version__, "Package __version__ is missing or empty." + assert isinstance(__version__, str), "__version__ should be a string." diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/LICENSE b/pkgs/community/swarmauri_tool_jupytergetshellmessage/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/README.md b/pkgs/community/swarmauri_tool_jupytergetshellmessage/README.md new file mode 100644 index 000000000..785c2d309 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/README.md @@ -0,0 +1,86 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupytergetshellmessage +

+ +--- + +# swarmauri_tool_jupytergetshellmessage + +A dedicated Python package providing a tool to retrieve shell messages from a running Jupyter kernel using jupyter_client. Built on the swarmauri framework, JupyterGetShellMessageTool is suitable for debugging, logging, and diagnostic purposes. + +--- + +## Installation + +To install this package from PyPI, use the following command: + + pip install swarmauri_tool_jupytergetshellmessage + +If you are using Poetry for dependency management, add it to your project by specifying the package name in your pyproject.toml under [tool.poetry.dependencies]: + + [tool.poetry.dependencies] + swarmauri_tool_jupytergetshellmessage = "^0.6.1" + +Once installed, the package automatically brings in its required dependencies: +• swarmauri_core +• swarmauri_base +• jupyter_client + +No specialized steps beyond a standard Python environment with pip or Poetry are necessary. + +--- + +## Usage + +Below is a simple example illustrating how to retrieve shell messages from a currently running Jupyter kernel. Make sure you have an active Jupyter kernel in the environment you are running this code from (for instance, a notebook server launched via "jupyter notebook" or "jupyter lab"). + +1. Import JupyterGetShellMessageTool: + + from swarmauri_tool_jupytergetshellmessage import JupyterGetShellMessageTool + +2. Instantiate the tool and call it: + + tool = JupyterGetShellMessageTool() + result = tool(timeout=10.0) # Wait up to 10 seconds for shell messages + +3. Inspect the result: + + if "messages" in result: + for msg in result["messages"]: + print("Shell Message:", msg) + else: + print("No messages or error:", result.get("error", "No details")) + +The tool attempts to connect to the active Jupyter kernel, retrieve available shell messages, and return them in a structured dictionary. If no messages are found within the specified timeout, it returns an error message indicating the timeout event. 
+ +--- + +## Dependencies + +• swarmauri_core and swarmauri_base provide the core classes (ComponentBase, ToolBase) for building and integrating tools across the swarmauri ecosystem. +• jupyter_client is leveraged to interface with the running Jupyter kernel, enabling retrieval of shell-based IPC messages. + +These dependencies are automatically installed when installing this package via pip or Poetry. + +--- + +## Contributing + +Issues and feature requests for swarmauri_tool_jupytergetshellmessage are always welcome. Although direct repository interaction details are not included here, feel free to suggest improvements or report problems using the relevant issue tracker. + +--- + +© 2025 Swarmauri. Licensed under the Apache License, Version 2.0. +See the LICENSE file for details. \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/payload.json b/pkgs/community/swarmauri_tool_jupytergetshellmessage/payload.json new file mode 100644 index 000000000..a486c2435 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to retrieve shell messages from a running Jupyter kernel using jupyter_client, useful for debugging execution responses.", + "PACKAGE_ROOT": "swarmauri_tool_jupytergetshellmessage", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterGetShellMessageTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Retrieve messages from the kernel's shell channel.", + "Parse and log the shell messages for diagnostics.", + "Handle message retrieval errors gracefully.", + "Support configurable timeouts for message waiting.", + "Return structured message data.", + "Integrate with cell execution tools for comprehensive 
output." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "jupyter_client", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/pyproject.toml b/pkgs/community/swarmauri_tool_jupytergetshellmessage/pyproject.toml new file mode 100644 index 000000000..cf9818596 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupytergetshellmessage" +version = "0.6.1" +description = "A tool designed to retrieve shell messages from a running Jupyter kernel using jupyter_client, useful for debugging execution responses." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupytergetshellmessage/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.14" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +jupyter_client = "^8.6.3" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + 
+[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupytergetshellmessagetool = "swarmauri_tool_jupytergetshellmessage:JupyterGetShellMessageTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/swarmauri_tool_jupytergetshellmessage/JupyterGetShellMessageTool.py b/pkgs/community/swarmauri_tool_jupytergetshellmessage/swarmauri_tool_jupytergetshellmessage/JupyterGetShellMessageTool.py new file mode 100644 index 000000000..f75a4fcb5 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/swarmauri_tool_jupytergetshellmessage/JupyterGetShellMessageTool.py @@ -0,0 +1,107 @@ +from typing import List, Literal, Dict, Any +import logging +import time + +from pydantic import Field +from jupyter_client import find_connection_file, BlockingKernelClient + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + + +""" +JupyterGetShellMessageTool.py + +This module defines the JupyterGetShellMessageTool, a component that retrieves messages +from the Jupyter kernel's shell channel. It leverages the ToolBase and ComponentBase +classes from the swarmauri framework to integrate with the system's tool architecture. + +The JupyterGetShellMessageTool supports retrieving and parsing messages for diagnostic +purposes. It includes timeout-based handling to avoid hanging during message retrieval. 
+""" + + +@ComponentBase.register_type(ToolBase, "JupyterGetShellMessageTool") +class JupyterGetShellMessageTool(ToolBase): + """ + JupyterGetShellMessageTool is a tool designed to retrieve messages from the kernel's shell channel. + It listens for shell messages within a specified timeout, logs them for diagnostics, and returns + the structured messages. + + Attributes: + version (str): The version of the JupyterGetShellMessageTool. + parameters (List[Parameter]): A list of parameters that configure message retrieval. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterGetShellMessageTool"]): The type identifier for the tool. + """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="timeout", + type="number", + description="The time in seconds to wait for shell messages before giving up.", + required=False, + ), + ] + ) + name: str = "JupyterGetShellMessageTool" + description: str = "Retrieves messages from the Jupyter kernel's shell channel." + type: Literal["JupyterGetShellMessageTool"] = "JupyterGetShellMessageTool" + + def __call__(self, timeout: float = 5.0) -> Dict[str, Any]: + """ + Retrieves messages from the Jupyter kernel's shell channel within the specified timeout. + + Args: + timeout (float, optional): The number of seconds to wait for shell messages + before timing out. Defaults to 5.0. + + Returns: + Dict[str, Any]: A dictionary containing all retrieved shell messages or an error message. + + Example: + >>> tool = JupyterGetShellMessageTool() + >>> result = tool(timeout=10.0) + >>> print(result) + { + 'messages': [ + {'header': {...}, 'parent_header': {...}, 'metadata': {...}, 'content': {...}, 'buffers': [...]}, + ... 
+ ] + } + """ + messages = [] + try: + connection_file = find_connection_file() # Find the kernel connection file + client = BlockingKernelClient(connection_file=connection_file) + client.load_connection_file() + client.start_channels() + + start_time = time.monotonic() + retrieved_any_message = False + + while time.monotonic() - start_time < timeout: + if client.shell_channel.msg_ready(): + msg = client.shell_channel.get_msg(block=False) + messages.append(msg) + logging.debug(f"Retrieved a shell message: {msg}") + retrieved_any_message = True + else: + time.sleep(0.1) + + client.stop_channels() + + if not retrieved_any_message: + return { + "error": f"No shell messages received within {timeout} seconds." + } + + return {"messages": messages} + + except Exception as e: + logging.exception("Error retrieving shell messages") + return {"error": str(e)} diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/swarmauri_tool_jupytergetshellmessage/__init__.py b/pkgs/community/swarmauri_tool_jupytergetshellmessage/swarmauri_tool_jupytergetshellmessage/__init__.py new file mode 100644 index 000000000..d58566280 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/swarmauri_tool_jupytergetshellmessage/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool import ( + JupyterGetShellMessageTool, +) + + +__all__ = ["JupyterGetShellMessageTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupytergetshellmessage") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/tests/unit/test_JupyterGetShellMessageTool.py 
b/pkgs/community/swarmauri_tool_jupytergetshellmessage/tests/unit/test_JupyterGetShellMessageTool.py new file mode 100644 index 000000000..01eadbb90 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/tests/unit/test_JupyterGetShellMessageTool.py @@ -0,0 +1,100 @@ +import pytest +from unittest.mock import patch, MagicMock +from swarmauri_tool_jupytergetshellmessage.swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool import ( + JupyterGetShellMessageTool, +) + + +""" +test_JupyterGetShellMessageTool.py + +This module contains pytest-based unit tests for the JupyterGetShellMessageTool class. +The tests ensure the tool correctly retrieves shell messages from a Jupyter kernel, +handles timeouts, and manages exceptions. +""" + + +def test_class_attributes() -> None: + """ + Test that the class attributes match the expected default values and types. + """ + tool = JupyterGetShellMessageTool() + assert tool.version == "1.0.0", "Version attribute should be '1.0.0'." + assert tool.name == "JupyterGetShellMessageTool", "Unexpected name attribute." + assert tool.type == "JupyterGetShellMessageTool", "Unexpected tool type." + assert "timeout" in [param.name for param in tool.parameters], ( + "Parameter 'timeout' should be in the parameters list." + ) + + +@pytest.mark.parametrize("timeout_value", [0.1, 1.0, 5.0]) +def test_call_method_no_messages(timeout_value: float) -> None: + """ + Verify that calling the tool with no messages available returns + an error indicating no shell messages were received. 
+ """ + mock_client = MagicMock() + # Simulate no messages on the shell channel + mock_client.shell_channel.msg_ready.return_value = False + + with ( + patch( + "swarmauri_tool_jupytergetshellmessage.swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool.find_connection_file", + return_value="fake_connection_file", + ), + patch( + "swarmauri_tool_jupytergetshellmessage.swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool.BlockingKernelClient", + return_value=mock_client, + ), + ): + tool = JupyterGetShellMessageTool() + result = tool(timeout=timeout_value) + assert "error" in result, "Expected an error when no messages are available." + + +def test_call_method_with_messages() -> None: + """ + Verify that when messages are available, the tool retrieves them + and returns them in the 'messages' field. + """ + mock_client = MagicMock() + # Create a fake message + fake_message = {"content": {"text": "Hello, world!"}} + # Simulate message availability + mock_client.shell_channel.msg_ready.side_effect = [True, False] + mock_client.shell_channel.get_msg.return_value = fake_message + + with ( + patch( + "swarmauri_tool_jupytergetshellmessage.swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool.find_connection_file", + return_value="fake_connection_file", + ), + patch( + "swarmauri_tool_jupytergetshellmessage.swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool.BlockingKernelClient", + return_value=mock_client, + ), + ): + tool = JupyterGetShellMessageTool() + result = tool(timeout=1.0) + assert "messages" in result, "Expected 'messages' key in result." + assert len(result["messages"]) == 1, "Expected exactly one retrieved message." + assert result["messages"][0]["content"]["text"] == "Hello, world!", ( + "Message content does not match expected value." + ) + + +def test_call_method_exception_handling() -> None: + """ + Verify that when an exception occurs during message retrieval, + the tool returns an error dictionary. 
+ """ + with patch( + "swarmauri_tool_jupytergetshellmessage.swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool.find_connection_file", + side_effect=RuntimeError("Test Error"), + ): + tool = JupyterGetShellMessageTool() + result = tool() + assert "error" in result, "Expected an error when exception is raised." + assert "Test Error" in result["error"], ( + "Expected 'Test Error' in the error message." + ) diff --git a/pkgs/community/swarmauri_tool_jupytergetshellmessage/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupytergetshellmessage/tests/unit/test___init__.py new file mode 100644 index 000000000..0001a3ac1 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytergetshellmessage/tests/unit/test___init__.py @@ -0,0 +1,77 @@ +""" +Module: test___init__.py + +This module contains pytest-based unit tests for the __init__.py file of the +swarmauri_tool_jupytergetshellmessage package. It ensures that the package +is correctly exporting classes and attributes, including the JupyterGetShellMessageTool +class and the package version. +""" + +from typing import Any + + +# Import from the package's __init__.py file +from swarmauri_tool_jupytergetshellmessage import ( + JupyterGetShellMessageTool, + __version__, + __all__, +) + + +class BaseTestCase: + """ + A simple base class for test cases. In a more complex test suite, this + class could house shared setup logic, tear-down routines, or common + utility methods needed by all test classes. + """ + + def common_setup(self) -> None: + """ + Common setup method for all test classes inheriting from BaseTestCase. + """ + pass + + +class TestInit(BaseTestCase): + """ + TestInit is responsible for validating the exports from the + package's __init__.py file. It ensures that core components + like JupyterGetShellMessageTool are accessible and functional. 
+ """ + + def test_jupyter_get_shell_message_tool_in_all(self) -> None: + """ + Verify that the JupyterGetShellMessageTool class is listed in __all__. + This ensures that it is exposed as part of the package's public API. + """ + self.common_setup() + assert "JupyterGetShellMessageTool" in __all__, ( + "JupyterGetShellMessageTool should be in __all__" + ) + + def test_can_instantiate_jupyter_get_shell_message_tool(self) -> None: + """ + Check that a JupyterGetShellMessageTool instance can be created + without raising any exceptions. + """ + self.common_setup() + tool_instance: Any = JupyterGetShellMessageTool() + assert tool_instance is not None, ( + "Failed to instantiate JupyterGetShellMessageTool." + ) + + def test_version_is_string(self) -> None: + """ + Confirm that __version__ is defined as a string, indicating + the package version is correctly set. + """ + self.common_setup() + assert isinstance(__version__, str), "__version__ should be a string." + + def test_version_is_not_empty(self) -> None: + """ + Check that the version string is not empty. This test ensures that + the package version has been properly assigned. + """ + self.common_setup() + assert __version__ != "", "__version__ should not be empty." diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/LICENSE b/pkgs/community/swarmauri_tool_jupyterreadnotebook/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2025 Jacob Stewart @ Swarmauri + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/README.md b/pkgs/community/swarmauri_tool_jupyterreadnotebook/README.md new file mode 100644 index 000000000..5a515e3dc --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/README.md @@ -0,0 +1,129 @@ + + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterreadnotebook +

+ +--- + +# swarmauri_tool_jupyterreadnotebook + +The swarmauri_tool_jupyterreadnotebook package provides a tool (`JupyterReadNotebookTool`) that reads a Jupyter Notebook file from the local filesystem, validates it using nbformat, and returns it for further processing. This is especially useful in scenarios where you need to programmatically read and inspect notebooks, or integrate them into automated workflows. + +## Installation + +1. Make sure you have Python 3.10 or above installed on your system. +2. Install the package using your preferred Python dependency management method. For example, with pip: + • pip install swarmauri_tool_jupyterreadnotebook + + Alternatively, if you use Poetry, add the following to your pyproject.toml under dependencies, then run poetry install: + ```toml + [tool.poetry.dependencies] + swarmauri_tool_jupyterreadnotebook = "*" + ``` +3. Ensure all required dependencies (found in pyproject.toml) are satisfied. This package relies on: + • nbformat for reading and validating notebooks. + • swarmauri_core and swarmauri_base for base tool definitions used throughout the Swarmauri ecosystem. + +4. Once installed, you can immediately import and use the tool in your own project. + +## Usage + +The primary entry point is the JupyterReadNotebookTool class. It inherits from the Swarmauri base class ToolBase and integrates seamlessly into the Swarmauri environment. However, it can also be used independently. + +Here is a simple usage example demonstrating how to invoke the tool in your code: + +---------------------------------------------------------------------------------------------------- +```python +from swarmauri_tool_jupyterreadnotebook import JupyterReadNotebookTool + +def read_notebook_example(): + """ + Demonstrates how to read a Jupyter Notebook from the filesystem using the JupyterReadNotebookTool. 
+ """ + # Instantiate the tool + notebook_reader = JupyterReadNotebookTool() + + # Provide the path to the notebook and optionally specify nbformat version + result = notebook_reader( + notebook_file_path="path_to_your_notebook.ipynb", + as_version=4 + ) + + if "notebook_node" in result: + # Successfully read the notebook + print("Notebook content:") + notebook_data = result["notebook_node"] + # You can inspect the notebook data as needed, e.g., listing cells + for i, cell in enumerate(notebook_data.cells): + print(f"Cell {i} type:", cell.cell_type) + else: + # An error occurred + print("Error reading notebook:", result["error"]) + +read_notebook_example() +``` +---------------------------------------------------------------------------------------------------- + +In this example: +• We instantiate JupyterReadNotebookTool with default settings. +• We call it, passing in the notebook file path and optional nbformat version. +• On a successful read, the dictionary returned will contain a "notebook_node" key with the parsed Jupyter notebook contents. Otherwise, it will contain an "error" key. + +## Detailed Parameters for JupyterReadNotebookTool + +• notebook_file_path (str): REQUIRED - The file path to the Jupyter Notebook to be read. +• as_version (int): OPTIONAL - The nbformat version (e.g., 4) to parse the notebook as. Defaults to 4 if not specified. + +## Internal Logic + +The tool follows these steps: + +1. Reads the specified notebook file from the provided path. +2. Parses the notebook data using the requested nbformat version (default is version 4). +3. Validates notebook data to ensure schema compliance. +4. Returns the notebook data or an error message if something went wrong (such as a missing file or a validation error). + +By leveraging this straightforward approach, the swarmauri_tool_jupyterreadnotebook package helps ensure that your notebooks remain valid, consistent, and ready for further processing. 
+ +--- + +## Dependencies + +Below is a list of primary dependencies used by this package: +• nbformat for reading and validating Jupyter Notebooks. +• swarmauri_core and swarmauri_base for base classes and decorators as required by Swarmauri components. +• pydantic for type validation (where relevant). + +All dependencies are detailed in pyproject.toml. No additional manual installation is needed beyond installing this package. + +--- + +## License + +swarmauri_tool_jupyterreadnotebook is licensed under the Apache License 2.0. See the LICENSE file for more details. + +--- + +© 2025 Swarmauri. All rights reserved. \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/payload.json b/pkgs/community/swarmauri_tool_jupyterreadnotebook/payload.json new file mode 100644 index 000000000..432c1eb4c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that reads a Jupyter Notebook file using nbformat, converting the JSON file into a NotebookNode object.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterreadnotebook", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterReadNotebookTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Read a notebook file from the filesystem.", + "Convert JSON data into a NotebookNode object.", + "Support different notebook versions via as_version parameter.", + "Log the read operation and handle errors gracefully.", + "Validate the notebook schema after reading.", + "Return the NotebookNode for further processing." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbformat", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/poetry.lock b/pkgs/community/swarmauri_tool_jupyterreadnotebook/poetry.lock new file mode 100644 index 000000000..9330ee432 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/poetry.lock @@ -0,0 +1,1552 @@ +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.8.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "anyio-4.8.0-py3-none-any.whl", hash = 
"sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "attrs" +version = "25.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy 
= ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + +[[package]] +name = "certifi" +version = "2025.1.31" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = 
"charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = 
"charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file 
= "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" and sys_platform == \"win32\" or python_version >= \"3.12\" and sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = 
"sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + +[[package]] +name = "fastjsonschema" +version = "2.21.1" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, + {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "flake8" +version = "7.1.2" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "flake8-7.1.2-py2.py3-none-any.whl", hash = "sha256:1cbc62e65536f65e6d754dfe6f1bada7f5cf392d6f5db3c2b85892466c3e7c1a"}, + {file = "flake8-7.1.2.tar.gz", hash = "sha256:c586ffd0b41540951ae41af572e6790dbd49fc12b3aa2541685d253d9bd504bd"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" 
+version = "1.0.7" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all 
= ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", 
"rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] + +[[package]] +name = "jsonschema-specifications" +version = "2024.10.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "nbformat" +version = "5.10.4" +description = "The Jupyter Notebook format" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, + {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, +] + +[package.dependencies] +fastjsonschema = ">=2.15" +jsonschema = ">=2.6" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "numpy" +version = "2.2.3" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "numpy-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cbc6472e01952d3d1b2772b720428f8b90e2deea8344e854df22b0618e9cce71"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdfe0c22692a30cd830c0755746473ae66c4a8f2e7bd508b35fb3b6a0813d787"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:e37242f5324ffd9f7ba5acf96d774f9276aa62a966c0bad8dae692deebec7716"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95172a21038c9b423e68be78fd0be6e1b97674cde269b76fe269a5dfa6fadf0b"}, + {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d5b47c440210c5d1d67e1cf434124e0b5c395eee1f5806fdd89b553ed1acd0a3"}, + {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0391ea3622f5c51a2e29708877d56e3d276827ac5447d7f45e9bc4ade8923c52"}, + {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f6b3dfc7661f8842babd8ea07e9897fe3d9b69a1d7e5fbb743e4160f9387833b"}, + {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ad78ce7f18ce4e7df1b2ea4019b5817a2f6a8a16e34ff2775f646adce0a5027"}, + {file = "numpy-2.2.3-cp310-cp310-win32.whl", hash = "sha256:5ebeb7ef54a7be11044c33a17b2624abe4307a75893c001a4800857956b41094"}, + {file = "numpy-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:596140185c7fa113563c67c2e894eabe0daea18cf8e33851738c19f70ce86aeb"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:16372619ee728ed67a2a606a614f56d3eabc5b86f8b615c79d01957062826ca8"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5521a06a3148686d9269c53b09f7d399a5725c47bbb5b35747e1cb76326b714b"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:7c8dde0ca2f77828815fd1aedfdf52e59071a5bae30dac3b4da2a335c672149a"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:77974aba6c1bc26e3c205c2214f0d5b4305bdc719268b93e768ddb17e3fdd636"}, + {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d42f9c36d06440e34226e8bd65ff065ca0963aeecada587b937011efa02cdc9d"}, + {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2712c5179f40af9ddc8f6727f2bd910ea0eb50206daea75f58ddd9fa3f715bb"}, + {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c8b0451d2ec95010d1db8ca733afc41f659f425b7f608af569711097fd6014e2"}, + {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9b4a8148c57ecac25a16b0e11798cbe88edf5237b0df99973687dd866f05e1b"}, + 
{file = "numpy-2.2.3-cp311-cp311-win32.whl", hash = "sha256:1f45315b2dc58d8a3e7754fe4e38b6fce132dab284a92851e41b2b344f6441c5"}, + {file = "numpy-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f48ba6f6c13e5e49f3d3efb1b51c8193215c42ac82610a04624906a9270be6f"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12c045f43b1d2915eca6b880a7f4a256f59d62df4f044788c8ba67709412128d"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:87eed225fd415bbae787f93a457af7f5990b92a334e346f72070bf569b9c9c95"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:712a64103d97c404e87d4d7c47fb0c7ff9acccc625ca2002848e0d53288b90ea"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a5ae282abe60a2db0fd407072aff4599c279bcd6e9a2475500fc35b00a57c532"}, + {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5266de33d4c3420973cf9ae3b98b54a2a6d53a559310e3236c4b2b06b9c07d4e"}, + {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe"}, + {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:34c1b7e83f94f3b564b35f480f5652a47007dd91f7c839f404d03279cc8dd021"}, + {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d8335b5f1b6e2bce120d55fb17064b0262ff29b459e8493d1785c18ae2553b8"}, + {file = "numpy-2.2.3-cp312-cp312-win32.whl", hash = "sha256:4d9828d25fb246bedd31e04c9e75714a4087211ac348cb39c8c5f99dbb6683fe"}, + {file = "numpy-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bfdb06b395385ea9b91bf55c1adf1b297c9fdb531552845ff1d3ea6e40d5aba"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:23c9f4edbf4c065fddb10a4f6e8b6a244342d95966a48820c614891e5059bb50"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:a0c03b6be48aaf92525cccf393265e02773be8fd9551a2f9adbe7db1fa2b60f1"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:2376e317111daa0a6739e50f7ee2a6353f768489102308b0d98fcf4a04f7f3b5"}, + {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fb62fe3d206d72fe1cfe31c4a1106ad2b136fcc1606093aeab314f02930fdf2"}, + {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52659ad2534427dffcc36aac76bebdd02b67e3b7a619ac67543bc9bfe6b7cdb1"}, + {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b416af7d0ed3271cad0f0a0d0bee0911ed7eba23e66f8424d9f3dfcdcae1304"}, + {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1402da8e0f435991983d0a9708b779f95a8c98c6b18a171b9f1be09005e64d9d"}, + {file = "numpy-2.2.3-cp313-cp313-win32.whl", hash = "sha256:136553f123ee2951bfcfbc264acd34a2fc2f29d7cdf610ce7daf672b6fbaa693"}, + {file = "numpy-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5b732c8beef1d7bc2d9e476dbba20aaff6167bf205ad9aa8d30913859e82884b"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:435e7a933b9fda8126130b046975a968cc2d833b505475e588339e09f7672890"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7678556eeb0152cbd1522b684dcd215250885993dd00adb93679ec3c0e6e091c"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2e8da03bd561504d9b20e7a12340870dfc206c64ea59b4cfee9fceb95070ee94"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:c9aa4496fd0e17e3843399f533d62857cef5900facf93e735ef65aa4bbc90ef0"}, + {file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ca91d61a4bf61b0f2228f24bbfa6a9facd5f8af03759fe2a655c50ae2c6610"}, + 
{file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deaa09cd492e24fd9b15296844c0ad1b3c976da7907e1c1ed3a0ad21dded6f76"}, + {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:246535e2f7496b7ac85deffe932896a3577be7af8fb7eebe7146444680297e9a"}, + {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:daf43a3d1ea699402c5a850e5313680ac355b4adc9770cd5cfc2940e7861f1bf"}, + {file = "numpy-2.2.3-cp313-cp313t-win32.whl", hash = "sha256:cf802eef1f0134afb81fef94020351be4fe1d6681aadf9c5e862af6602af64ef"}, + {file = "numpy-2.2.3-cp313-cp313t-win_amd64.whl", hash = "sha256:aee2512827ceb6d7f517c8b85aa5d3923afe8fc7a57d028cffcd522f1c6fd082"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3c2ec8a0f51d60f1e9c0c5ab116b7fc104b165ada3f6c58abf881cb2eb16044d"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ed2cf9ed4e8ebc3b754d398cba12f24359f018b416c380f577bbae112ca52fc9"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39261798d208c3095ae4f7bc8eaeb3481ea8c6e03dc48028057d3cbdbdb8937e"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:783145835458e60fa97afac25d511d00a1eca94d4a8f3ace9fe2043003c678e4"}, + {file = "numpy-2.2.3.tar.gz", hash = "sha256:dbdc15f0c81611925f382dfa97b3bd0bc2c1ce19d4fe50482cb0ddc12ba30020"}, +] + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pandas" +version = 
"2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] 
+consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pillow" +version = "10.4.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = 
"pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = 
"pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = 
"sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.12.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or 
python_version >= \"3.12\"" +files = [ + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, 
+ {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = 
"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pytest" +version = "8.3.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytest-8.3.4-py3-none-any.whl", hash = 
"sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.25.3" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-json-report" +version = "1.5.0" +description = "A pytest plugin to report test results as JSON files" +optional = false +python-versions = "*" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytest-json-report-1.5.0.tar.gz", hash = "sha256:2dde3c647851a19b5f3700729e8310a6e66efb2077d674f27ddea3d34dc615de"}, + {file = "pytest_json_report-1.5.0-py3-none-any.whl", hash = "sha256:9897b68c910b12a2e48dd849f9a284b2c79a732a8a9cb398452ddd23d3c8c325"}, +] + +[package.dependencies] +pytest = ">=3.8.0" +pytest-metadata = "*" + +[[package]] +name = "pytest-metadata" +version = 
"3.1.1" +description = "pytest plugin for test session metadata" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytest_metadata-3.1.1-py3-none-any.whl", hash = "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b"}, + {file = "pytest_metadata-3.1.1.tar.gz", hash = "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "tox (>=3.24.5)"] + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] 
+name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2025.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, +] + +[[package]] +name = "pywin32" +version = "308" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" and sys_platform == \"win32\" and platform_python_implementation != \"PyPy\" or python_version >= \"3.12\" and sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = 
"sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML 
parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = 
"pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "referencing" +version = "0.36.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rpds-py" +version = "0.22.3" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = 
"rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = 
"rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = 
"sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = 
"rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = 
"rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = 
"rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", 
hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "swarmauri-base" +version = "0.6.1.dev13" +description = "This repository includes base classes and mixins for the Swarmauri framework." 
+optional = false +python-versions = ">=3.10,<3.13" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [] +develop = false + +[package.dependencies] +pydantic = "^2.0" +swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} + +[package.source] +type = "git" +url = "https://github.com/swarmauri/swarmauri-sdk.git" +reference = "mono/dev" +resolved_reference = "129ca244b1f67b908c4ea3a2d75b1c577fe5ddd8" +subdirectory = "pkgs/base" + +[[package]] +name = "swarmauri-core" +version = "0.6.1.dev13" +description = "This repository includes core interfaces for the Swarmauri framework." +optional = false +python-versions = ">=3.10,<3.13" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [] +develop = false + +[package.dependencies] +pydantic = "^2.0" +pyyaml = "^6.0.2" + +[package.source] +type = "git" +url = "https://github.com/swarmauri/swarmauri-sdk.git" +reference = "mono/dev" +resolved_reference = "129ca244b1f67b908c4ea3a2d75b1c577fe5ddd8" +subdirectory = "pkgs/core" + +[[package]] +name = "swarmauri-standard" +version = "0.6.1.dev13" +description = "This repository includes standard components within the Swarmauri framework." 
+optional = false +python-versions = ">=3.10,<3.13" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [] +develop = false + +[package.dependencies] +aiofiles = "24.1.0" +httpx = "^0.27.0" +joblib = "^1.4.0" +numpy = "*" +pandas = "*" +Pillow = ">=8.0,<11.0" +pydantic = "^2.9.2" +requests = "^2.32.3" +swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +toml = "^0.10.2" +typing_extensions = "*" + +[package.source] +type = "git" +url = "https://github.com/swarmauri/swarmauri-sdk.git" +reference = "mono/dev" +resolved_reference = "129ca244b1f67b908c4ea3a2d75b1c577fe5ddd8" +subdirectory = "pkgs/standards/swarmauri_standard" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = 
"tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = 
"sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2025.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = 
"tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.10,<3.13" +content-hash = "a9cd5c6c7e814982fece034931cad83fc8cece6d507bb38b7c3a03f6dd94c766" diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterreadnotebook/pyproject.toml new file mode 100644 index 000000000..33949516e --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/pyproject.toml @@ -0,0 +1,57 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterreadnotebook" +version = "0.6.1" +description = "A tool that reads a Jupyter Notebook file using nbformat, converting the JSON file into a NotebookNode object." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterreadnotebook/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbformat = "^5.10.4" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "integration: Integration tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterreadnotebooktool = "swarmauri_tool_jupyterreadnotebook:JupyterReadNotebookTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/swarmauri_tool_jupyterreadnotebook/JupyterReadNotebookTool.py 
b/pkgs/community/swarmauri_tool_jupyterreadnotebook/swarmauri_tool_jupyterreadnotebook/JupyterReadNotebookTool.py new file mode 100644 index 000000000..e04ca112d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/swarmauri_tool_jupyterreadnotebook/JupyterReadNotebookTool.py @@ -0,0 +1,102 @@ +from typing import List, Literal, Dict, Any +import logging +import nbformat +from pydantic import Field +from nbformat.validator import NotebookValidationError + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + + +""" +JupyterReadNotebookTool.py + +This module defines the JupyterReadNotebookTool, a component that reads a Jupyter notebook file +from the filesystem, parses it into a NotebookNode object (using a specified nbformat version), +validates its integrity, and returns the resulting node for further processing. This component +inherits from the ToolBase class to seamlessly integrate with the system's tool architecture. +""" + + +@ComponentBase.register_type(ToolBase, "JupyterReadNotebookTool") +class JupyterReadNotebookTool(ToolBase): + """ + JupyterReadNotebookTool is a tool that reads a Jupyter notebook file from the filesystem + and returns a validated NotebookNode object. It supports specifying an nbformat version + for parsing the file, logs the read operation, and handles both file and validation errors. + + Attributes: + version (str): The version of the JupyterReadNotebookTool. + parameters (List[Parameter]): A list of parameters required to read the notebook file. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterReadNotebookTool"]): The type identifier for the tool. 
+ """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_file_path", + type="string", + description="The file path to the Jupyter notebook.", + required=True, + ), + Parameter( + name="as_version", + type="integer", + description="nbformat version to parse the notebook with (e.g., 4).", + required=False, + ), + ] + ) + name: str = "JupyterReadNotebookTool" + description: str = ( + "Reads a Jupyter notebook file from the filesystem, parses it into a " + "NotebookNode, validates its schema, and returns the node." + ) + type: Literal["JupyterReadNotebookTool"] = "JupyterReadNotebookTool" + + def __call__(self, notebook_file_path: str, as_version: int = 4) -> Dict[str, Any]: + """ + Reads a Jupyter notebook from the filesystem and returns it as a validated NotebookNode. + + Args: + notebook_file_path (str): The file path to the Jupyter notebook. + as_version (int, optional): The nbformat version to parse the file as. Defaults to 4. + + Returns: + Dict[str, Any]: A dictionary containing either the NotebookNode object under the + "notebook_node" key or an "error" key with a message if an error + occurred. 
+ """ + logger = logging.getLogger(__name__) + logger.info( + "Attempting to read Jupyter notebook from '%s' with nbformat version '%d'.", + notebook_file_path, + as_version, + ) + + try: + # Read and parse the notebook + nb_data = nbformat.read(notebook_file_path, as_version=as_version) + # Validate the notebook's schema + nbformat.validate(nb_data) + logger.info( + "Successfully read and validated notebook from '%s'.", + notebook_file_path, + ) + return {"notebook_node": nb_data} + except FileNotFoundError: + error_message = f"File not found: {notebook_file_path}" + logger.error(error_message) + return {"error": error_message} + except NotebookValidationError as e: + error_message = f"Notebook validation error: {str(e)}" + logger.error(error_message) + return {"error": error_message} + except Exception as e: + error_message = f"Failed to read notebook: {str(e)}" + logger.error(error_message) + return {"error": error_message} diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/swarmauri_tool_jupyterreadnotebook/__init__.py b/pkgs/community/swarmauri_tool_jupyterreadnotebook/swarmauri_tool_jupyterreadnotebook/__init__.py new file mode 100644 index 000000000..1c441f17a --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/swarmauri_tool_jupyterreadnotebook/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterreadnotebook.JupyterReadNotebookTool import ( + JupyterReadNotebookTool, +) + + +__all__ = ["JupyterReadNotebookTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterreadnotebook") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git 
a/pkgs/community/swarmauri_tool_jupyterreadnotebook/tests/unit/test_JupyterReadNotebookTool.py b/pkgs/community/swarmauri_tool_jupyterreadnotebook/tests/unit/test_JupyterReadNotebookTool.py new file mode 100644 index 000000000..0f4f7c6f7 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterreadnotebook/tests/unit/test_JupyterReadNotebookTool.py @@ -0,0 +1,158 @@ +""" +test_JupyterReadNotebookTool.py + +This module contains pytest-based unit tests for the JupyterReadNotebookTool class. +It verifies the functionality of reading and validating Jupyter notebooks, as well +as handling various error conditions. +""" + +import pytest +from unittest.mock import patch, MagicMock +from nbformat.validator import NotebookValidationError +from swarmauri_tool_jupyterreadnotebook.JupyterReadNotebookTool import ( + JupyterReadNotebookTool, +) + + +def fake_nb_validation(*args, **kwargs): + """ + Helper function to simulate a notebook validation error by raising + NotebookValidationError with a dummy exception object that has the + required attributes: 'message', 'instance', 'validator', 'relative_schema_path', + and 'relative_path'. + """ + from types import SimpleNamespace + + dummy = SimpleNamespace( + message="Notebook is invalid", + instance="dummy instance", + validator="dummy validator", + relative_schema_path=[], # Required for formatting the error + relative_path=[], # Added to satisfy the expected attribute + ) + raise NotebookValidationError(dummy) + + +@pytest.fixture +def jupyter_read_notebook_tool() -> JupyterReadNotebookTool: + """ + A pytest fixture that returns an instance of JupyterReadNotebookTool for use in tests. + """ + return JupyterReadNotebookTool() + + +def test_tool_initialization( + jupyter_read_notebook_tool: JupyterReadNotebookTool, +) -> None: + """ + Test that the tool initializes correctly with the expected default values. 
+ """ + assert jupyter_read_notebook_tool.version == "1.0.0" + assert len(jupyter_read_notebook_tool.parameters) == 2 + assert jupyter_read_notebook_tool.name == "JupyterReadNotebookTool" + assert jupyter_read_notebook_tool.description.startswith("Reads a Jupyter notebook") + assert jupyter_read_notebook_tool.type == "JupyterReadNotebookTool" + + +@patch("nbformat.read") +@patch("nbformat.validate") +def test_call_success( + mock_validate: MagicMock, + mock_read: MagicMock, + jupyter_read_notebook_tool: JupyterReadNotebookTool, +) -> None: + """ + Test successful reading and validating of a Jupyter notebook. + """ + # Arrange + mock_notebook_node = {"cells": [], "metadata": {}} + mock_read.return_value = mock_notebook_node + + # Act + result = jupyter_read_notebook_tool("dummy_path", as_version=4) + + # Assert + mock_read.assert_called_once_with("dummy_path", as_version=4) + mock_validate.assert_called_once_with(mock_notebook_node) + assert "notebook_node" in result + assert result["notebook_node"] == mock_notebook_node + + +@patch("nbformat.read", side_effect=FileNotFoundError) +def test_call_file_not_found( + mock_read: MagicMock, jupyter_read_notebook_tool: JupyterReadNotebookTool +) -> None: + """ + Test handling of the FileNotFoundError when the notebook file is absent. + """ + result = jupyter_read_notebook_tool("non_existent_path.ipynb", as_version=4) + assert "error" in result + assert "File not found" in result["error"] + mock_read.assert_called_once() + + +@patch("nbformat.read", return_value={"cells": [], "metadata": {}}) +@patch("nbformat.validate", side_effect=fake_nb_validation) +def test_call_validation_error( + mock_validate: MagicMock, + mock_read: MagicMock, + jupyter_read_notebook_tool: JupyterReadNotebookTool, +) -> None: + """ + Test handling of a NotebookValidationError when the notebook structure is invalid. 
+ """ + result = jupyter_read_notebook_tool("invalid_notebook.ipynb", as_version=4) + assert "error" in result + assert "Notebook validation error" in result["error"] + mock_read.assert_called_once() + mock_validate.assert_called_once() + + +@patch("nbformat.read", side_effect=Exception("Unexpected error")) +def test_call_unexpected_exception( + mock_read: MagicMock, jupyter_read_notebook_tool: JupyterReadNotebookTool +) -> None: + """ + Test that an unexpected exception is handled gracefully with an appropriate error message. + """ + result = jupyter_read_notebook_tool("any_path.ipynb", as_version=4) + assert "error" in result + assert "Failed to read notebook" in result["error"] + mock_read.assert_called_once() + + +@patch("nbformat.read") +@patch("nbformat.validate") +def test_call_non_empty_read( + mock_validate: MagicMock, + mock_read: MagicMock, + jupyter_read_notebook_tool: JupyterReadNotebookTool, +) -> None: + """ + Test reading and validating a non-empty Jupyter notebook. + """ + # Arrange: Create a non-empty notebook node + non_empty_notebook = { + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": "print('Hello, world!')", + } + ], + "metadata": {"language_info": {"name": "python"}}, + } + mock_read.return_value = non_empty_notebook + + # Act + result = jupyter_read_notebook_tool("non_empty_notebook.ipynb", as_version=4) + + # Assert + mock_read.assert_called_once_with("non_empty_notebook.ipynb", as_version=4) + mock_validate.assert_called_once_with(non_empty_notebook) + assert "notebook_node" in result + assert result["notebook_node"] == non_empty_notebook + # Ensure the notebook contains at least one cell + assert len(result["notebook_node"]["cells"]) > 0 diff --git a/pkgs/community/swarmauri_tool_jupyterreadnotebook/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterreadnotebook/tests/unit/test___init__.py new file mode 100644 index 000000000..a780e4eed --- /dev/null +++ 
b/pkgs/community/swarmauri_tool_jupyterreadnotebook/tests/unit/test___init__.py @@ -0,0 +1,48 @@ +# swarmauri_tool_jupyterreadnotebook/tests/unit/test___init__.py + +""" +Module containing unit tests for the initialization of the +swarmauri_tools_jupyterreadnotebook package. +""" + +from swarmauri_base.tools.ToolBase import ToolBase + + +class TestPackageInitialization: + """ + Class containing tests for the initialization of the + swarmauri_tools_jupyterreadnotebook package. + """ + + def test_jupyter_read_notebook_tool_import(self) -> None: + """ + Test that JupyterReadNotebookTool is successfully imported from the package's __init__. + """ + from swarmauri_tool_jupyterreadnotebook import JupyterReadNotebookTool + + # Verify we can access the class + assert JupyterReadNotebookTool is not None, ( + "JupyterReadNotebookTool should be imported from the package init file." + ) + + def test_package_version(self) -> None: + """ + Test that the package __version__ string is available and non-empty. + """ + from swarmauri_tool_jupyterreadnotebook import __version__ + + # Verify the version is a non-empty string + assert isinstance(__version__, str), "__version__ should be a string." + assert len(__version__) > 0, "__version__ should not be an empty string." + + def test_jupyter_read_notebook_tool_inherits_correctly(self) -> None: + """ + Test that JupyterReadNotebookTool inherits from the expected base class. + This test is provided as an example if there is a known base class to check against. + """ + from swarmauri_tool_jupyterreadnotebook import JupyterReadNotebookTool + + # Here we check only as an example scenario; adjust accordingly for real use. + assert issubclass(JupyterReadNotebookTool, ToolBase) is True, ( + "Inherit from ToolBase if JupyterReadNotebookTool must extend a known base class." 
+ ) diff --git a/pkgs/community/swarmauri_tool_jupyterruncell/LICENSE b/pkgs/community/swarmauri_tool_jupyterruncell/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterruncell/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterruncell/README.md b/pkgs/community/swarmauri_tool_jupyterruncell/README.md new file mode 100644 index 000000000..6885845ae --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterruncell/README.md @@ -0,0 +1,88 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterruncell +

+ +--- + +# swarmauri_tool_jupyterruncell + +This package provides a specialized tool for executing Python code cells interactively, capturing output and errors, and optionally applying timeouts. It is designed to integrate seamlessly with the broader Swarmauri tool ecosystem. + +## Installation + +To install the package from PyPI: + +1. Make sure you have Python 3.10 or newer. +2. Install using pip: + + pip install swarmauri_tool_jupyterruncell + +3. Once installed, you can import and use the tool in your Python scripts or Jupyter notebooks. + +If you prefer using Poetry, add the dependency to your pyproject.toml and install accordingly. + +## Usage + +Below is a simple example of how to utilize the JupyterRunCellTool in your Python code. This tool inherits from the Swarmauri base classes, ensuring it integrates into your existing Swarmauri-based projects. + +Example of usage in a Python script or Jupyter notebook: + +-------------------------------------------------------------------------------- + +from swarmauri_tool_jupyterruncell import JupyterRunCellTool + +# Instantiate the tool +tool = JupyterRunCellTool() + +# Simple code execution +result = tool(code="print('Hello from JupyterRunCellTool!')", timeout=5) + +if result["success"]: + print("Cell Output:", result["cell_output"]) + print("No errors captured.") +else: + print("Cell Output:", result["cell_output"]) + print("Error Output:", result["error_output"]) + +-------------------------------------------------------------------------------- + +In this example: +• code: The Python code to run (as a string). +• timeout: Optional parameter specifying the maximum number of seconds allowed for execution. Set to 0 or omit to disable timeouts. + +The returned dictionary includes: +• cell_output: The captured stdout from the executed cell. +• error_output: Any error messages or exceptions encountered. +• success: A boolean indicating if execution was completed without unhandled exceptions. 
+ +## Additional Information + +• The tool is designed to work within an active IPython session. +• If no IPython session is detected, the tool will report an error. +• Use the timeout feature to prevent indefinite execution of code blocks. + +## Dependencies + +This package depends on: +• Python 3.10 or newer. +• IPython for interactive cell execution. +• swarmauri_core and swarmauri_base for Swarmauri integration. + +These dependencies are automatically installed when you install this package from PyPI, so no additional manual steps are required. + +--- + +Maintained by the Swarmauri team under the Apache-2.0 License. +Please visit our PyPI page for the latest releases and updates. \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterruncell/payload.json b/pkgs/community/swarmauri_tool_jupyterruncell/payload.json new file mode 100644 index 000000000..406a4b567 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterruncell/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to execute a single code cell in an IPython interactive shell, mimicking notebook cell execution.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterruncell", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterRunCellTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Execute a code cell in an interactive IPython shell.", + "Capture cell outputs and errors.", + "Log the execution process.", + "Handle execution timeouts and errors gracefully.", + "Return the cell output for further processing.", + "Integrate with automated testing workflows." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "IPython", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterruncell/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterruncell/pyproject.toml new file mode 100644 index 000000000..28e5d0524 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterruncell/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterruncell" +version = "0.6.1" +description = "A tool designed to execute a single code cell in an IPython interactive shell, mimicking notebook cell execution." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterruncell/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri dependencies +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Other dependencies +IPython = "^8.32.0" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: 
"""
JupyterRunCellTool.py

This module defines the JupyterRunCellTool, a component that executes Python
code cells in an interactive IPython environment. It captures the standard
output and standard error streams, enforces an optional execution timeout,
and reports whether the cell completed successfully.
"""

import contextlib
import io
import logging
import signal
import traceback
from typing import Any, Dict, List, Literal, Optional

from pydantic import Field

from swarmauri_standard.tools.Parameter import Parameter
from swarmauri_base.tools.ToolBase import ToolBase
from swarmauri_core.ComponentBase import ComponentBase

# Configure a logger for this module.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


def _timeout_handler(signum, frame):
    """
    SIGALRM handler that aborts cell execution by raising TimeoutError.
    """
    raise TimeoutError("Cell execution timed out.")


@ComponentBase.register_type(ToolBase, "JupyterRunCellTool")
class JupyterRunCellTool(ToolBase):
    """
    JupyterRunCellTool executes Python code within an interactive IPython
    shell. It captures the stdout and stderr streams, handles execution
    timeouts, logs the process, and returns the output for further processing.

    Attributes:
        version (str): The version of the JupyterRunCellTool.
        parameters (List[Parameter]): Parameters describing the code snippet
            to run and the optional timeout in seconds.
        name (str): The name of the tool.
        description (str): A brief description of the tool's functionality.
        type (Literal["JupyterRunCellTool"]): The type identifier for the tool.
    """

    version: str = "1.0.0"
    parameters: List[Parameter] = Field(
        default_factory=lambda: [
            Parameter(
                name="code",
                type="string",
                description="The Python code to run in the IPython cell.",
                required=True,
            ),
            Parameter(
                name="timeout",
                type="number",
                description="Optional timeout (in seconds) for the code execution. Default is 0 (no timeout).",
                required=False,
                default=0,
            ),
        ]
    )
    name: str = "JupyterRunCellTool"
    description: str = (
        "Executes Python code in an IPython environment, capturing stdout and stderr."
    )
    type: Literal["JupyterRunCellTool"] = "JupyterRunCellTool"

    def __call__(self, code: str, timeout: Optional[float] = 0) -> Dict[str, Any]:
        """
        Executes the provided Python code in an interactive IPython session.

        Args:
            code (str): The Python code to execute in a cell.
            timeout (float, optional): Maximum time in seconds allowed for the
                execution. 0 (the default) disables the timeout. The timeout
                relies on SIGALRM, so it is only enforced on Unix platforms
                and only when called from the main thread; elsewhere it is a
                no-op rather than an error.

        Returns:
            Dict[str, Any]: A dictionary containing:
                - "cell_output" (str): Captured stdout from the executed cell.
                - "error_output" (str): Captured stderr plus any exception
                  raised by the cell.
                - "success" (bool): True only if the cell ran without errors.

        Example:
            >>> tool = JupyterRunCellTool()
            >>> result = tool("print('Hello, world!')")
            >>> result["success"]
            True
        """
        from IPython import get_ipython

        logger.info("JupyterRunCellTool called with code:\n%s", code)
        logger.info("Timeout set to %s seconds.", timeout)

        # Retrieve the current IPython shell instance.
        shell = get_ipython()
        if shell is None:
            logger.error("No active IPython shell found.")
            return {
                "cell_output": "",
                "error_output": "Error: No active IPython shell available.",
                "success": False,
            }

        # Buffers to capture the cell's stdout and stderr.
        output_buffer = io.StringIO()
        error_buffer = io.StringIO()

        # SIGALRM only exists on Unix and only fires in the main thread;
        # skip the timeout machinery elsewhere instead of crashing with
        # AttributeError on signal.SIGALRM.
        use_alarm = bool(timeout) and timeout > 0 and hasattr(signal, "SIGALRM")
        original_handler = None
        if use_alarm:
            original_handler = signal.getsignal(signal.SIGALRM)
            signal.signal(signal.SIGALRM, _timeout_handler)
            signal.alarm(int(timeout))

        try:
            with (
                contextlib.redirect_stdout(output_buffer),
                contextlib.redirect_stderr(error_buffer),
            ):
                result = shell.run_cell(code)

            cell_output = output_buffer.getvalue()
            error_output = error_buffer.getvalue()

            # run_cell traps exceptions raised inside the cell, so success
            # must be read from the ExecutionResult rather than inferred from
            # the absence of an exception here.
            error = result.error_before_exec or result.error_in_exec
            if error is not None:
                error_text = "".join(
                    traceback.format_exception_only(type(error), error)
                )
                error_output = (error_output + "\n" + error_text).strip()
                logger.error("Cell execution failed: %s", error_text.strip())
                return {
                    "cell_output": cell_output,
                    "error_output": error_output,
                    "success": False,
                }

            logger.info("Cell execution completed.")
            logger.debug("Captured stdout: %s", cell_output.strip())
            logger.debug("Captured stderr: %s", error_output.strip())
            return {
                "cell_output": cell_output,
                "error_output": error_output,
                "success": True,
            }

        except TimeoutError as e:
            # Safety net for an alarm that fires outside the cell body itself.
            logger.error("TimeoutError: %s", str(e))
            cell_output = output_buffer.getvalue()
            error_output = error_buffer.getvalue() + f"\nTimeoutError: {str(e)}"
            return {
                "cell_output": cell_output,
                "error_output": error_output,
                "success": False,
            }
        except Exception as e:
            logger.error("An error occurred during cell execution: %s", str(e))
            cell_output = output_buffer.getvalue()
            error_output = error_buffer.getvalue() + "\n" + traceback.format_exc()
            return {
                "cell_output": cell_output,
                "error_output": error_output,
                "success": False,
            }
        finally:
            if use_alarm:
                # Cancel any pending alarm and restore the previous handler.
                signal.alarm(0)
                signal.signal(signal.SIGALRM, original_handler)
"""
test_JupyterRunCellTool.py

This module contains pytest-based unit tests for the JupyterRunCellTool class.
It verifies the tool's functionality under various scenarios, including normal
execution, error handling, syntax errors, and timeout handling.
"""

import signal

from IPython.core.interactiveshell import InteractiveShell

from swarmauri_tool_jupyterruncell.JupyterRunCellTool import JupyterRunCellTool

# The tool locates the current shell via get_ipython(), which returns None
# under plain pytest; create (and globally register) the singleton shell up
# front so every test runs against a real interactive shell.
InteractiveShell.instance()


def test_jupyter_run_cell_tool_basic() -> None:
    """
    Test that JupyterRunCellTool successfully executes a simple Python code
    snippet and captures the expected stdout output without errors.
    """
    tool = JupyterRunCellTool()
    result = tool(code="print('Hello, test!')", timeout=2)

    assert result["success"] is True, "Expected execution success to be True."
    assert "Hello, test!" in result["cell_output"], (
        "Expected 'Hello, test!' in cell output."
    )
    assert result["error_output"] == "", "Expected empty error output."


def test_jupyter_run_cell_tool_error_handling() -> None:
    """
    Test that JupyterRunCellTool captures exceptions and returns them
    correctly in the error output, setting the success flag to False.
    """
    tool = JupyterRunCellTool()
    result = tool(code="raise ValueError('Test error')", timeout=2)

    assert result["success"] is False, (
        "Expected execution success to be False due to exception."
    )
    assert "ValueError" in result["error_output"], (
        "Expected 'ValueError' in error output."
    )
    assert "Test error" in result["error_output"], (
        "Expected 'Test error' message in error output."
    )


def test_jupyter_run_cell_tool_syntax_error() -> None:
    """
    Test that JupyterRunCellTool handles syntax errors by capturing the error
    details and setting success to False.
    """
    tool = JupyterRunCellTool()
    result = tool(code="This is not valid Python code!", timeout=2)

    assert result["success"] is False, (
        "Expected execution success to be False due to syntax error."
    )
    assert "SyntaxError" in result["error_output"], (
        "Expected 'SyntaxError' in error output."
    )


def test_jupyter_run_cell_tool_timeout() -> None:
    """
    Test that JupyterRunCellTool respects the timeout parameter and reports a
    TimeoutError if the code execution exceeds the specified limit.
    """
    if not hasattr(signal, "SIGALRM"):
        # The tool's timeout is SIGALRM-based, which does not exist on
        # Windows; there the timeout is a documented no-op, so skip.
        return

    tool = JupyterRunCellTool()
    code = "import time\ntime.sleep(2)\n"
    result = tool(code=code, timeout=1)

    assert result["success"] is False, "Expected success to be False due to timeout."
    assert "TimeoutError" in result["error_output"], (
        "Expected 'TimeoutError' in error output."
    )
    assert "Cell execution timed out." in result["error_output"], (
        "Expected timeout message in error output."
    )
+ """ + assert __version__ is not None, "__version__ should be defined in __init__.py" + assert isinstance(__version__, str), "__version__ should be a string" + # If needed, further checks on version string format could be added here. diff --git a/pkgs/community/swarmauri_tool_jupytershutdownkernel/LICENSE b/pkgs/community/swarmauri_tool_jupytershutdownkernel/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytershutdownkernel/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupytershutdownkernel/README.md b/pkgs/community/swarmauri_tool_jupytershutdownkernel/README.md new file mode 100644 index 000000000..fba3e3623 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytershutdownkernel/README.md @@ -0,0 +1,84 @@ + +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupytershutdownkernel +

+ +--- + +# swarmauri_tool_jupytershutdownkernel + +The swarmauri_tool_jupytershutdownkernel package provides a straightforward solution to shut down a running Jupyter kernel programmatically. It uses jupyter_client under the hood and is integrated into the Swarmauri framework ecosystem. This tool can be useful for automated resource management, testing scenarios that require repeated kernel restarts, or any workflow that programmatically terminates Jupyter kernels. + +## Installation + +You can install this module directly via PyPI using pip: + + pip install swarmauri_tool_jupytershutdownkernel + +This will install the package and its dependencies, including jupyter_client and the Swarmauri libraries required by JupyterShutdownKernelTool. + +Ensure you are running a Python version between 3.10 and 3.13, and that you have the appropriate Swarmauri core/base packages installed. Typically, pip will handle these dependencies automatically. + +## Usage + +After installation, you can use the JupyterShutdownKernelTool to shut down a running Jupyter kernel by referencing it within your Python scripts or tools. + +Here’s a quick example of how to import and use JupyterShutdownKernelTool: + +-------------------------------------------------------------------------------- +Example: + +from swarmauri_tool_jupytershutdownkernel import JupyterShutdownKernelTool + +def shutdown_kernel_example(kernel_identifier: str): + """ + Demonstrates shutting down a Jupyter kernel using the JupyterShutdownKernelTool. + """ + # Instantiate the tool + shutdown_tool = JupyterShutdownKernelTool() + + # Perform kernel shutdown + response = shutdown_tool(kernel_id=kernel_identifier, shutdown_timeout=5) + + # Print the result + print(response) +-------------------------------------------------------------------------------- + +1. Create an instance of JupyterShutdownKernelTool. +2. 
Invoke it like a function, passing the kernel_id (the unique identifier for your kernel) and an optional shutdown_timeout in seconds. +3. The method returns a dictionary with the key-value pairs indicating whether the shutdown was successful or if an error occurred. + +### Detailed Usage Instructions + +• Ensure the kernel you want to shut down is running and that its connection file is accessible. +• Pass the kernel's ID or name to the tool. +• Optionally configure the shutdown_timeout parameter (default is 5s) to give the tool more or less time to perform a graceful shutdown. +• Check the returned dictionary to confirm a successful shutdown or to see an error message for troubleshooting. + +### Dependencies + +• jupyter_client – Underlies the kernel shutdown implementation. +• swarmauri_core / swarmauri_base – Provide the foundational classes (ComponentBase and ToolBase). +• pydantic – Used internally for type validation in Swarmauri parameters. + +Below is a reference to the core files where the functionality resides: + +1. JupyterShutdownKernelTool.py +2. __init__.py +3. pyproject.toml + +In particular, JupyterShutdownKernelTool.py includes the main logic for stopping a Jupyter kernel and handles failure modes such as missing kernels, missing connection files, or forced terminations if the kernel does not shut down gracefully within the allotted time. + +We hope this tool helps you manage Jupyter kernels more effectively, freeing you to focus on other aspects of your workflows! 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytershutdownkernel/payload.json b/pkgs/community/swarmauri_tool_jupytershutdownkernel/payload.json new file mode 100644 index 000000000..5c8e09280 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytershutdownkernel/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to shut down a running Jupyter kernel programmatically using jupyter_client, releasing all associated resources.", + "PACKAGE_ROOT": "swarmauri_tool_jupytershutdownkernel", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterShutdownKernelTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Gracefully shutdown a running Jupyter kernel.", + "Ensure all kernel resources are released.", + "Log shutdown events and handle errors.", + "Support timeout for kernel shutdown operations.", + "Return confirmation of successful kernel termination.", + "Integrate with the start kernel tool for lifecycle management." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "jupyter_client", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytershutdownkernel/pyproject.toml b/pkgs/community/swarmauri_tool_jupytershutdownkernel/pyproject.toml new file mode 100644 index 000000000..81244f898 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytershutdownkernel/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupytershutdownkernel" +version = "0.6.1" +description = "A tool designed to shut down a running Jupyter kernel programmatically using jupyter_client, releasing all associated resources." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupytershutdownkernel/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +jupyter_client = "^8.6.3" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupytershutdownkerneltool = "swarmauri_tool_jupytershutdownkernel:JupyterShutdownKernelTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytershutdownkernel/swarmauri_tool_jupytershutdownkernel/JupyterShutdownKernelTool.py 
# JupyterShutdownKernelTool.py
#
# Implements JupyterShutdownKernelTool: a swarmauri tool that requests a graceful
# shutdown of a running Jupyter kernel, escalates to a forced shutdown if the
# kernel does not terminate within a configurable timeout, and reports the
# outcome as a structured dictionary.

import logging
import time
from typing import Dict, List, Literal

from pydantic import Field
from jupyter_client import KernelManager
from jupyter_client.kernelspec import NoSuchKernel

from swarmauri_standard.tools.Parameter import Parameter
from swarmauri_base.tools.ToolBase import ToolBase
from swarmauri_core.ComponentBase import ComponentBase


@ComponentBase.register_type(ToolBase, "JupyterShutdownKernelTool")
class JupyterShutdownKernelTool(ToolBase):
    """
    Tool that terminates a running Jupyter kernel.

    The tool first asks the kernel to shut down cleanly, polls until it exits
    or the timeout elapses, and only then forces termination. Every outcome —
    success, timeout, unknown kernel, missing connection file, or unexpected
    failure — is returned as a dictionary rather than raised, so callers can
    branch on the ``status`` key.

    Attributes:
        version (str): Tool version string.
        parameters (List[Parameter]): Declared inputs (``kernel_id`` required,
            ``shutdown_timeout`` optional).
        name (str): Tool name used for registration and lookup.
        description (str): Human-readable summary of the tool.
        type (Literal["JupyterShutdownKernelTool"]): Discriminator for the
            component registry.
    """

    version: str = "1.0.0"
    parameters: List[Parameter] = Field(
        default_factory=lambda: [
            Parameter(
                name="kernel_id",
                type="string",
                description="Unique identifier or name of the kernel to be shut down.",
                required=True,
            ),
            Parameter(
                name="shutdown_timeout",
                type="integer",
                description="Maximum time in seconds to wait for the kernel to shut down cleanly.",
                required=False,
                default=5,
            ),
        ]
    )
    name: str = "JupyterShutdownKernelTool"
    description: str = (
        "Shuts down a running Jupyter kernel and releases associated resources."
    )
    type: Literal["JupyterShutdownKernelTool"] = "JupyterShutdownKernelTool"

    def __call__(self, kernel_id: str, shutdown_timeout: int = 5) -> Dict[str, str]:
        """
        Shut down the kernel identified by ``kernel_id``.

        Args:
            kernel_id (str): Identifier of the kernel to terminate.
            shutdown_timeout (int): Seconds to wait for a clean shutdown before
                forcing termination. Defaults to 5.

        Returns:
            Dict[str, str]: ``{"kernel_id", "status", "message"}`` where
                ``status`` is ``"success"`` or ``"error"`` and ``message``
                describes the result.

        Example:
            >>> tool = JupyterShutdownKernelTool()
            >>> tool("my_kernel_id", 5)
            {'kernel_id': 'my_kernel_id', 'status': 'success', 'message': 'Kernel shut down successfully.'}
        """
        log = logging.getLogger(__name__)
        log.info("Initiating shutdown for kernel_id='%s'", kernel_id)

        def outcome(status: str, message: str) -> Dict[str, str]:
            # Every exit path uses the same three-key payload shape.
            return {"kernel_id": kernel_id, "status": status, "message": message}

        try:
            km = KernelManager(kernel_name=kernel_id)
            # May raise if the connection file is missing or malformed.
            km.load_connection_file()

            # Polite shutdown request first; escalate only on timeout.
            km.shutdown_kernel(now=False)
            log.debug(
                "Shutdown request sent to kernel_id='%s'; waiting up to %s seconds.",
                kernel_id,
                shutdown_timeout,
            )

            # Poll for termination until the timeout budget is spent.
            waited = 0
            interval = 0.5
            while km.is_alive() and waited < shutdown_timeout:
                time.sleep(interval)
                waited += interval

            # Timeout expired with the kernel still alive: force it down.
            if km.is_alive():
                log.warning(
                    "Kernel did not shut down within %s seconds; forcing shutdown.",
                    shutdown_timeout,
                )
                km.shutdown_kernel(now=True)

            # Final verification that the kernel actually terminated.
            if km.is_alive():
                message = f"Kernel {kernel_id} could not be shut down."
                log.error(message)
                return outcome("error", message)

            message = f"Kernel {kernel_id} shut down successfully."
            log.info(message)
            return outcome("success", message)

        except NoSuchKernel:
            message = f"No such kernel: {kernel_id}."
            log.error(message)
            return outcome("error", message)
        except FileNotFoundError:
            message = f"Connection file not found for kernel: {kernel_id}."
            log.error(message)
            return outcome("error", message)
        except Exception as e:
            log.exception(
                "An error occurred while shutting down kernel_id='%s'.", kernel_id
            )
            return outcome(
                "error",
                f"Kernel shutdown failed due to unexpected error: {str(e)}",
            )
class TestJupyterShutdownKernelTool:
    """
    Test suite for the JupyterShutdownKernelTool class.
    """

    def test_class_inheritance(self) -> None:
        """Verify the tool subclasses ToolBase so it integrates with the tool framework."""
        tool = JupyterShutdownKernelTool()
        assert isinstance(tool, ToolBase), (
            "JupyterShutdownKernelTool does not inherit from ToolBase."
        )

    def test_initial_attributes(self) -> None:
        """Check default metadata (name, description, type) and the declared parameters."""
        tool = JupyterShutdownKernelTool()
        assert tool.name == "JupyterShutdownKernelTool"
        assert (
            tool.description
            == "Shuts down a running Jupyter kernel and releases associated resources."
        )
        assert tool.type == "JupyterShutdownKernelTool"
        assert isinstance(tool.parameters, list), "Parameters should be a list."

        # Check parameters exist
        kernel_id_param = next(
            (p for p in tool.parameters if p.name == "kernel_id"), None
        )
        timeout_param = next(
            (p for p in tool.parameters if p.name == "shutdown_timeout"), None
        )

        assert kernel_id_param is not None, "Missing required parameter 'kernel_id'."
        assert kernel_id_param.required is True, (
            "Parameter 'kernel_id' should be required."
        )
        assert timeout_param is not None, (
            "Missing optional parameter 'shutdown_timeout'."
        )

        # Instead of checking the Parameter instance for a default value,
        # inspect the __call__ method signature to confirm the default value is 5.
        sig = inspect.signature(tool.__call__)
        assert sig.parameters["shutdown_timeout"].default == 5, (
            "Default shutdown timeout should be 5."
        )

    @patch(
        "swarmauri_tool_jupytershutdownkernel.JupyterShutdownKernelTool.KernelManager"
    )
    def test_call_success(self, mock_kernel_manager: MagicMock) -> None:
        """Happy path: the kernel dies after the graceful shutdown request."""
        mock_manager_instance = mock_kernel_manager.return_value
        # The shutdown logic calls is_alive() several times:
        # 1. While-loop condition (iteration 1)
        # 2. While-loop condition (iteration 2) → exit loop
        # 3. Forced shutdown check (should not be called)
        # 4. Final check confirming kernel is down
        mock_manager_instance.is_alive.side_effect = [True, False, False, False]

        tool = JupyterShutdownKernelTool()
        result: Dict[str, str] = tool(kernel_id="test_kernel")

        assert result["status"] == "success"
        assert "shut down successfully" in result["message"]
        mock_manager_instance.shutdown_kernel.assert_called_with(now=False)

    @patch(
        "swarmauri_tool_jupytershutdownkernel.JupyterShutdownKernelTool.KernelManager"
    )
    def test_call_forced_shutdown(self, mock_kernel_manager: MagicMock) -> None:
        """Kernel never terminates: tool escalates to forced shutdown and reports an error."""
        mock_manager_instance = mock_kernel_manager.return_value
        # For forced shutdown, is_alive() is called multiple times:
        # Provide enough responses to cover all calls.
        mock_manager_instance.is_alive.side_effect = [True, True, True, True, True]

        tool = JupyterShutdownKernelTool()
        result: Dict[str, str] = tool(kernel_id="test_kernel", shutdown_timeout=1)

        assert result["status"] == "error"
        assert "could not be shut down" in result["message"]
        mock_manager_instance.shutdown_kernel.assert_any_call(now=True)

    @patch(
        "swarmauri_tool_jupytershutdownkernel.JupyterShutdownKernelTool.KernelManager"
    )
    def test_call_no_such_kernel(self, mock_kernel_manager: MagicMock) -> None:
        """NoSuchKernel raised while loading the connection file maps to an error payload."""
        mock_manager_instance = mock_kernel_manager.return_value
        # Instantiate NoSuchKernel with a dummy argument.
        mock_manager_instance.load_connection_file.side_effect = NoSuchKernel("dummy")

        tool = JupyterShutdownKernelTool()
        result: Dict[str, str] = tool(kernel_id="non_existent")

        assert result["status"] == "error"
        assert "No such kernel" in result["message"]

    @patch(
        "swarmauri_tool_jupytershutdownkernel.JupyterShutdownKernelTool.KernelManager"
    )
    def test_call_connection_file_not_found(
        self, mock_kernel_manager: MagicMock
    ) -> None:
        """A missing connection file maps to a descriptive error payload."""
        mock_manager_instance = mock_kernel_manager.return_value
        mock_manager_instance.load_connection_file.side_effect = FileNotFoundError

        tool = JupyterShutdownKernelTool()
        result: Dict[str, str] = tool(kernel_id="missing_connection_file")

        assert result["status"] == "error"
        assert "Connection file not found" in result["message"]

    @patch(
        "swarmauri_tool_jupytershutdownkernel.JupyterShutdownKernelTool.KernelManager"
    )
    def test_call_unexpected_exception(self, mock_kernel_manager: MagicMock) -> None:
        """Any other exception is caught and surfaced as an 'unexpected error' message."""
        mock_manager_instance = mock_kernel_manager.return_value
        mock_manager_instance.load_connection_file.side_effect = RuntimeError(
            "Unexpected error"
        )

        tool = JupyterShutdownKernelTool()
        result: Dict[str, str] = tool(kernel_id="faulty_kernel")

        assert result["status"] == "error"
        assert "unexpected error" in result["message"].lower()
class TestPackageInitialization:
    """Verify that the package's __init__.py exposes its public API and metadata."""

    def test_jupyter_shutdown_kernel_tool_existence(self) -> None:
        """
        Test that the JupyterShutdownKernelTool class is importable and exposed
        by the package's __init__.py.
        """
        # The module-level import above is the real check; this assert guards
        # against the name being rebound to None inside __init__.py.
        exported = JupyterShutdownKernelTool
        assert exported is not None, (
            "JupyterShutdownKernelTool must be exposed by __init__.py."
        )

    def test_version_attribute_existence(self) -> None:
        """
        Test that the __version__ attribute is defined and is a valid string
        within the package's __init__.py.
        """
        version_string = __version__
        # A non-string or empty __version__ would indicate a broken package build.
        assert isinstance(version_string, str), "__version__ must be a string."
        assert len(version_string) > 0, "__version__ must not be an empty string."
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterstartkernel/README.md b/pkgs/community/swarmauri_tool_jupyterstartkernel/README.md new file mode 100644 index 000000000..8b43f2f4c --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterstartkernel/README.md @@ -0,0 +1,109 @@ +![Swamauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterstartkernel +

+ +--- + +# swarmauri_tool_jupyterstartkernel + +## Overview +The swarmauri_tool_jupyterstartkernel package provides a tool that programmatically starts a Jupyter kernel using jupyter_client. It integrates seamlessly with the Swarmauri framework to offer flexible kernel initialization, monitoring, and error handling. + +This tool can be particularly useful for dynamic, programmatic execution of notebook cells, automated testing of notebook-based workflows, or other situations where a Python (or alternative language) kernel instance is needed on-demand. + +--- + +## Installation +You can install this package from the Python Package Index (PyPI). Make sure your Python version is between 3.10 and 3.12 (inclusive of 3.10 and exclusive of 3.13): + + pip install swarmauri_tool_jupyterstartkernel + +If your environment uses Poetry, you can add this line to your pyproject.toml under [tool.poetry.dependencies]: + + swarmauri_tool_jupyterstartkernel = "*" + +Note that the tool depends on: +• swarmauri_core +• swarmauri_base +• jupyter_client + +These will be installed automatically when using pip or Poetry. + +--- + +## Usage +Once installed, you can import and create an instance of the JupyterStartKernelTool in your Python code. Below is a simple example showing how to start a kernel and capture the resulting kernel name and ID. + +```python +from swarmauri_tool_jupyterstartkernel import JupyterStartKernelTool + +# Create an instance of the JupyterStartKernelTool +tool = JupyterStartKernelTool() + +# Start a default python3 kernel +results = tool() +print("Default Kernel Results:", results) + +# Start a different kernel by specifying 'kernel_name' +custom_results = tool(kernel_name="python3") +print("Custom Kernel Results:", custom_results) +``` + +### Advanced Usage +You can optionally provide a kernel specification dictionary to configure more complex settings (e.g., environment variables, resource limits, custom arguments). 
This example shows how you might pass a simple configuration dictionary: + +```python +config_spec = { + "env": { + "MY_CUSTOM_ENV_VAR": "test_value" + } +} + +# Start a kernel with custom specification +results_with_spec = tool(kernel_name="python3", kernel_spec=config_spec) +print("Advanced Kernel Results with Spec:", results_with_spec) +``` + +If a kernel fails to start, the tool returns an error message in the dictionary: + +```python +error_results = tool(kernel_name="non_existent_kernel") +if "error" in error_results: + print("Error starting kernel:", error_results["error"]) +``` + +### Retrieving the Kernel Manager +The JupyterStartKernelTool class stores the KernelManager instance internally for access after a successful start. You can retrieve it at any time using: + +```python +km = tool.get_kernel_manager() +if km: + print("Kernel Manager is available for further operations.") +``` + +--- + +## Dependencies +• swarmauri_core: Provides the base classes and architecture for Swarmauri-type components. +• swarmauri_base: Contains the general ToolBase class and other internal utilities. +• jupyter_client: Manages Jupyter kernel operations, allowing this tool to start and monitor kernels. + +--- + +## License +swarmauri_tool_jupyterstartkernel is distributed under the Apache-2.0 License. +© 2023 Swarmauri. All Rights Reserved. + +For additional support, feel free to open an issue or contact our team for guidance on leveraging this tool within your Swarmauri-based deployments. 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterstartkernel/payload.json b/pkgs/community/swarmauri_tool_jupyterstartkernel/payload.json new file mode 100644 index 000000000..6e9d959d1 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterstartkernel/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool designed to start a new Jupyter kernel programmatically using jupyter_client, enabling execution of notebook cells.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterstartkernel", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterStartKernelTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Initialize a new Jupyter kernel instance.", + "Support configuration of kernel specifications.", + "Log kernel start events.", + "Handle initialization errors gracefully.", + "Return the kernel ID for reference.", + "Integrate with further cell execution tools." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "jupyter_client", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterstartkernel/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterstartkernel/pyproject.toml new file mode 100644 index 000000000..34cdef62b --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterstartkernel/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterstartkernel" +version = "0.6.1" +description = "A tool designed to start a new Jupyter kernel programmatically using jupyter_client, enabling execution of notebook cells." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterstartkernel/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +jupyter_client = "^8.6.3" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterstartkerneltool = "swarmauri_tool_jupyterstartkernel:JupyterStartKernelTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterstartkernel/swarmauri_tool_jupyterstartkernel/JupyterStartKernelTool.py 
"""
JupyterStartKernelTool.py

This module defines the JupyterStartKernelTool, a component that starts a Jupyter kernel instance.
It leverages the ToolBase and ComponentBase classes from the swarmauri framework to integrate
seamlessly with the system's tool architecture.

The JupyterStartKernelTool supports initializing and configuring a new Jupyter kernel instance,
logging kernel start events, handling startup errors gracefully, and returning the kernel ID for
reference. It can also integrate with further tools that execute cells within the started kernel.
"""

import logging
from typing import List, Literal, Dict, Optional, Any
from pydantic import Field, PrivateAttr
from jupyter_client import KernelManager

from swarmauri_standard.tools.Parameter import Parameter
from swarmauri_core.ComponentBase import ComponentBase
from swarmauri_base.tools.ToolBase import ToolBase

logger = logging.getLogger(__name__)


@ComponentBase.register_type(ToolBase, "JupyterStartKernelTool")
class JupyterStartKernelTool(ToolBase):
    """
    JupyterStartKernelTool is a tool that initializes and configures a Jupyter kernel instance.

    Attributes:
        version (str): The version of the JupyterStartKernelTool.
        parameters (List[Parameter]): A list of parameters that define how the Jupyter kernel
            will be initialized.
        name (str): The name of the tool.
        description (str): A brief description of the tool's functionality.
        type (Literal["JupyterStartKernelTool"]): The type identifier for this tool.
    """

    version: str = "1.0.0"
    parameters: List[Parameter] = Field(
        default_factory=lambda: [
            Parameter(
                name="kernel_name",
                type="string",
                description="The name of the Jupyter kernel to start (e.g., 'python3').",
                required=False,
                default="python3",
            ),
            Parameter(
                name="kernel_spec",
                type="object",
                description="Optional dictionary to configure kernel specifications (if supported).",
                required=False,
                default=None,
            ),
        ]
    )
    name: str = "JupyterStartKernelTool"
    description: str = "Initializes and configures a Jupyter kernel instance."
    type: Literal["JupyterStartKernelTool"] = "JupyterStartKernelTool"

    # KernelManager of the most recently started kernel; None until __call__
    # succeeds. Declared as a pydantic private attribute so that assignment in
    # __call__ is valid under pydantic v2's __setattr__ rules.
    _kernel_manager: Optional[KernelManager] = PrivateAttr(default=None)

    def __call__(
        self, kernel_name: str = "python3", kernel_spec: Optional[Dict[str, Any]] = None
    ) -> Dict[str, str]:
        """
        Starts a new Jupyter kernel instance with the provided kernel name and optional specifications.

        Args:
            kernel_name (str): The name of the Jupyter kernel to start. Defaults to "python3".
            kernel_spec (Optional[Dict[str, Any]]): Optional kernel configuration. Currently the
                "env" key is honored: its dict value is forwarded to start_kernel() as the kernel
                subprocess environment. Other keys are logged but not applied.

        Returns:
            Dict[str, str]: A dictionary containing either the 'kernel_id' key with the kernel's identifier
                and 'kernel_name', or an 'error' key if the startup fails.
        """
        try:
            # Initialize the kernel manager for the requested kernel type.
            km = KernelManager(kernel_name=kernel_name)

            # Apply supported kernel_spec settings instead of silently ignoring them.
            start_kwargs: Dict[str, Any] = {}
            if kernel_spec:
                logger.debug(f"Applying kernel specification: {kernel_spec}")
                env = kernel_spec.get("env")
                if env is not None:
                    # jupyter_client's launcher accepts 'env' as the kernel
                    # subprocess environment.
                    start_kwargs["env"] = env

            # Start the kernel (no-op kwargs when no spec was supplied, so the
            # default call path is unchanged).
            km.start_kernel(**start_kwargs)
            # NOTE(review): assumes jupyter_client assigns kernel_id during
            # start-up for a standalone KernelManager — confirm for the pinned
            # jupyter_client version.
            kernel_id = km.kernel_id

            # Store the KernelManager for potential further interactions
            # (e.g., cell-execution or shutdown tools).
            self._kernel_manager = km

            logger.info(
                f"Started Jupyter kernel '{kernel_name}' with ID '{kernel_id}'."
            )
            return {"kernel_name": kernel_name, "kernel_id": kernel_id}

        except Exception as ex:
            # Startup failures are reported, not raised, so callers can branch
            # on the presence of the 'error' key.
            logger.error(f"Failed to start Jupyter kernel '{kernel_name}': {ex}")
            return {"error": str(ex)}

    def get_kernel_manager(self) -> Optional[KernelManager]:
        """
        Retrieves the KernelManager instance for the active Jupyter kernel.

        Returns:
            Optional[KernelManager]: The KernelManager instance if a kernel has been
                started by this tool, otherwise None.
        """
        return self._kernel_manager
for creating and managing Jupyter kernel instances. These tests validate that +the class behaves correctly under different conditions, including normal operation and +error scenarios. +""" + +import pytest +from typing import Dict, Any +from unittest.mock import patch, MagicMock + +from swarmauri_tool_jupyterstartkernel.JupyterStartKernelTool import ( + JupyterStartKernelTool, +) + + +def test_tool_initialization() -> None: + """ + Tests that the JupyterStartKernelTool can be instantiated with default parameters. + Ensures the tool's attributes are set as expected. + """ + tool = JupyterStartKernelTool() + assert tool.name == "JupyterStartKernelTool", ( + "Tool name should match expected default." + ) + assert tool.version == "1.0.0", "Default version should be '1.0.0'." + assert tool.type == "JupyterStartKernelTool", ( + "Tool type should be 'JupyterStartKernelTool'." + ) + assert len(tool.parameters) == 2, ( + "Expected two default parameters (kernel_name, kernel_spec)." + ) + + +@pytest.mark.parametrize( + "kernel_name, expected_kernel_name", + [ + ("python3", "python3"), + ("python2", "python2"), + ], +) +@patch( + "swarmauri_tool_jupyterstartkernel.JupyterStartKernelTool.KernelManager", + autospec=True, +) +def test_call_success( + mock_kernel_manager_class: MagicMock, kernel_name: str, expected_kernel_name: str +) -> None: + """ + Tests that calling the JupyterStartKernelTool successfully starts a kernel + and returns a dictionary with kernel_name and kernel_id. + Uses a mock to avoid starting a real kernel. + """ + mock_kernel_manager = MagicMock() + mock_kernel_manager.kernel_id = "fake_kernel_id" + mock_kernel_manager_class.return_value = mock_kernel_manager + + tool = JupyterStartKernelTool() + result = tool(kernel_name=kernel_name) + + mock_kernel_manager_class.assert_called_once_with(kernel_name=expected_kernel_name) + mock_kernel_manager.start_kernel.assert_called_once() + + assert "kernel_id" in result, "Result should contain 'kernel_id'." 
+ assert result["kernel_name"] == expected_kernel_name, ( + "Kernel name should match the input." + ) + assert result["kernel_id"] == "fake_kernel_id", ( + "Mock kernel ID should match the returned value." + ) + assert tool.get_kernel_manager() is mock_kernel_manager, ( + "Tool should store the KernelManager instance internally." + ) + + +@patch( + "swarmauri_tool_jupyterstartkernel.JupyterStartKernelTool.KernelManager", + autospec=True, +) +def test_call_failure(mock_kernel_manager_class: MagicMock) -> None: + """ + Tests that if the kernel manager raises an exception during start, + the tool returns a dictionary containing 'error' and logs the exception. + """ + mock_kernel_manager = MagicMock() + mock_kernel_manager.start_kernel.side_effect = RuntimeError( + "Failed to start kernel." + ) + mock_kernel_manager_class.return_value = mock_kernel_manager + + tool = JupyterStartKernelTool() + result = tool(kernel_name="invalid_kernel") + + assert "error" in result, "Result should contain an 'error' key on failure." + assert "Failed to start kernel." in result["error"], ( + "Error message should indicate what went wrong." + ) + assert tool.get_kernel_manager() is None, ( + "KernelManager should not be stored if kernel start fails." + ) + + +def test_get_kernel_manager_without_call() -> None: + """ + Tests that get_kernel_manager returns None if the __call__ method has not been invoked + and no kernel has been started. + """ + tool = JupyterStartKernelTool() + km = tool.get_kernel_manager() + assert km is None, "Expected None if no kernel has been started." + + +def test_tool_parameters() -> None: + """ + Tests that the parameters field of the tool can be customized and accessed properly. 
+ """ + custom_params = [ + { + "name": "kernel_name", + "type": "string", + "description": "Customized kernel name parameter.", + "required": True, + "default": "python3", + }, + { + "name": "extra_config", + "type": "object", + "description": "Extra configuration for advanced kernel startup.", + "required": False, + "default": {}, + }, + ] + # Convert dicts to Parameter objects if that is the required usage pattern + # For now, assume direct assignment is sufficient + tool = JupyterStartKernelTool(parameters=custom_params) # type: ignore + assert len(tool.parameters) == 2, "Customized tool should have two parameters." + assert tool.parameters[0]["name"] == "kernel_name", ( + "First parameter should be kernel_name." + ) + assert tool.parameters[1]["name"] == "extra_config", ( + "Second parameter should be extra_config." + ) + assert tool.parameters[1]["default"] == {}, ( + "Default for extra_config should be an empty dict." + ) + + +def test_call_with_kernel_spec() -> None: + """ + Tests that calling the tool with a kernel specification does not raise an error + and returns the correct dictionary structure. This test does not mock the kernel + manager and is intended for demonstration; in real usage, a mock would be used. + """ + tool = JupyterStartKernelTool() + result: Dict[str, Any] = tool( + kernel_name="python3", kernel_spec={"env": {"KEY": "VALUE"}} + ) + + # Since no actual kernel is started in many CI environments, this could fail in real scenarios, + # but we include it here to demonstrate potential usage. + if "error" in result: + assert "error" in result, ( + "If an error occurred due to environment constraints, the result should contain 'error'." + ) + else: + assert "kernel_id" in result, "A successful call should return a 'kernel_id'." + assert "kernel_name" in result, ( + "A successful call should return a 'kernel_name'." 
+ ) diff --git a/pkgs/community/swarmauri_tool_jupyterstartkernel/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterstartkernel/tests/unit/test___init__.py new file mode 100644 index 000000000..7bc19e686 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterstartkernel/tests/unit/test___init__.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# test___init__.py +""" +Unit tests for the swarmauri_tool_jupyterstartkernel package __init__.py file. + +This module provides pytest-based test cases to ensure that the package +initialization logic is correct and the main components are properly exposed. +""" + + +def test_jupyter_start_kernel_tool_is_importable() -> None: + """ + Test that JupyterStartKernelTool is importable from the package's main module. + This ensures that the __init__.py file properly exposes the class. + """ + # Import the class from the root of the package to confirm exposure in __init__.py + from swarmauri_tool_jupyterstartkernel import JupyterStartKernelTool + + assert JupyterStartKernelTool is not None, ( + "JupyterStartKernelTool could not be imported." + ) + + +def test_jupyter_start_kernel_tool_in_all() -> None: + """ + Test that JupyterStartKernelTool is included in the package's __all__ list. + This verifies that the class is declared in __all__. + """ + from swarmauri_tool_jupyterstartkernel import __all__ as exposed + + assert "JupyterStartKernelTool" in exposed, ( + "'JupyterStartKernelTool' not found in __all__." + ) + + +def test_version_exists_and_is_string() -> None: + """ + Test that the __version__ attribute is defined and is a string. + This confirms that the package version is exposed correctly. + """ + from swarmauri_tool_jupyterstartkernel import __version__ + + assert __version__ is not None, "__version__ is not defined." + assert isinstance(__version__, str), ( + f"__version__ should be a string, got {type(__version__)}." 
+ ) diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/LICENSE b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2025 Jacob Stewart @ Swarmauri + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/README.md b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/README.md new file mode 100644 index 000000000..c252b43d9 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/README.md @@ -0,0 +1,135 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupytervalidatenotebook +

+ +--- + +# swarmauri_tool_jupytervalidatenotebook + +## Overview +This package provides a tool that validates a Jupyter notebook (NotebookNode) against its JSON schema using nbformat. It is useful for ensuring that your notebooks follow the correct structural and metadata standards required for processing or distribution. The tool can easily be integrated into automated workflows for CI/CD or general code validation processes. + +## Installation + +To install this package using pip: + + pip install swarmauri_tool_jupytervalidatenotebook + +If you are using Poetry, you may add the following line to your pyproject.toml under [tool.poetry.dependencies]: + + swarmauri_tool_jupytervalidatenotebook = "*" + +Then run: + + poetry install + +Make sure that you have a supported version of Python (3.10+), together with the required dependencies as defined in the pyproject.toml (including nbformat, pydantic, typing_extensions, etc.). + +## Usage + +Below is a basic example of how to use the JupyterValidateNotebookTool to validate a notebook: + +------------------------------------------------------------------- + +import logging +import nbformat +from swarmauri_tool_jupytervalidatenotebook import JupyterValidateNotebookTool + +def main(): + # Configure logging to see validation messages: + logging.basicConfig(level=logging.INFO) + + # Create an instance of the validation tool + validator = JupyterValidateNotebookTool() + + # Load a notebook for validation. Make sure the notebook is in the correct format (v4 typically). 
+ notebook = nbformat.read("my_notebook.ipynb", as_version=4) + + # Invoke the validator by calling the tool with the notebook object + validation_result = validator(notebook) + + # Check the outcome + if validation_result["valid"] == "True": + print("Success:", validation_result["report"]) + else: + print("Failure:", validation_result["report"]) + +if __name__ == "__main__": + main() + +------------------------------------------------------------------- + +In this example: +• We import nbformat to read the notebook file into a NotebookNode object. +• We instantiate JupyterValidateNotebookTool. +• We pass our notebook to the tool, which will return a dictionary with "valid" and "report" keys. +• We then inspect those keys to display the results of the validation procedure. + +## Advanced Usage + +You can further customize log handling or implement additional processing of the validation results to suit your workflow. For instance, you might collect statistics, filter notebooks based on validation success, or integrate the tool into multi-step pipelines. + +Logging is handled by the Python logging library. For more production-focused scenarios, configure logging as needed to capture validation details, such as warnings or errors in your notebooks. + +Example with expanded logging: + +------------------------------------------------------------------- + +import logging +import nbformat +from swarmauri_tool_jupytervalidatenotebook import JupyterValidateNotebookTool + +def validate_notebooks(notebook_paths): + logger = logging.getLogger(__name__) + logging.basicConfig(level=logging.INFO) + validator = JupyterValidateNotebookTool() + + for path in notebook_paths: + try: + notebook = nbformat.read(path, as_version=4) + result = validator(notebook) + if result["valid"] == "True": + logger.info(f"{path} passed validation. Details: {result['report']}") + else: + logger.warning(f"{path} failed validation. 
Error: {result['report']}") + except FileNotFoundError: + logger.error(f"Notebook file not found: {path}") + +if __name__ == "__main__": + notebooks_to_check = ["notebook1.ipynb", "notebook2.ipynb"] + validate_notebooks(notebooks_to_check) + +------------------------------------------------------------------- + +The above approach allows you to queue multiple notebooks for validation, with clear logs about success/failure. + +## Dependencies + +Key libraries and versions: +• Python >= 3.10,<3.13 +• nbformat +• pydantic +• typing_extensions + +For development, additional libraries such as pytest, flake8, and others may be included for testing and linting. + +## Versioning +The underlying version of this tool is managed by its own distribution metadata. You can retrieve the tool's version by referencing the __version__ attribute in the package (if installed from PyPI) or by checking the version field in the pyproject.toml file. + +------------------------------------------------------------------- + +For any issues, please consult the nbformat documentation to ensure your notebooks are well-formed. This tool primarily serves to confirm schema compliance, which is an essential first step in verifying proper notebook functionality in the broader Jupyter ecosystem. + +Happy validating! 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/payload.json b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/payload.json new file mode 100644 index 000000000..079fa00ae --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that validates a NotebookNode object against the Jupyter Notebook schema using nbformat, ensuring structural correctness.", + "PACKAGE_ROOT": "swarmauri_tool_jupytervalidatenotebook", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterValidateNotebookTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Accept a NotebookNode as input.", + "Validate the notebook against its JSON schema.", + "Log any discrepancies or validation errors.", + "Handle invalid notebook structures gracefully.", + "Provide a report on the validation results.", + "Return a boolean status indicating validity." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbformat", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/pyproject.toml b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/pyproject.toml new file mode 100644 index 000000000..0559edfe1 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/pyproject.toml @@ -0,0 +1,58 @@ +[tool.poetry] +name = "swarmauri_tool_jupytervalidatenotebook" +version = "0.6.1" +description = "A tool that validates a NotebookNode object against the Jupyter Notebook schema using nbformat, ensuring structural correctness." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbformat = "^5.10.4" +pydantic = "^2.10.6" +typing_extensions = "^4.12.2" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupytervalidatenotebooktool = "swarmauri_tool_jupytervalidatenotebook:JupyterValidateNotebookTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/swarmauri_tool_jupytervalidatenotebook/JupyterValidateNotebookTool.py 
b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/swarmauri_tool_jupytervalidatenotebook/JupyterValidateNotebookTool.py new file mode 100644 index 000000000..fe107dbbf --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/swarmauri_tool_jupytervalidatenotebook/JupyterValidateNotebookTool.py @@ -0,0 +1,73 @@ +""" +JupyterValidateNotebookTool.py + +This module defines the JupyterValidateNotebookTool, a component that validates a Jupyter +notebook against its JSON schema using nbformat. It inherits from the swarmauri framework +classes to integrate seamlessly as a tool that can be invoked with a NotebookNode as input. +""" + +import logging +from typing import List, Dict +from pydantic import Field +from typing_extensions import Literal +import jsonschema +import nbformat +from nbformat import NotebookNode +from nbformat.validator import NotebookValidationError + + +from swarmauri_core.ComponentBase import ComponentBase +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase + + +@ComponentBase.register_type(ToolBase, "JupyterValidateNotebookTool") +class JupyterValidateNotebookTool(ToolBase): + """ + JupyterValidateNotebookTool is a tool that validates a Jupyter NotebookNode against + its JSON schema. It leverages nbformat to perform the validation, logs any discrepancies + or errors, and returns a report along with a boolean indicating validity. + + Attributes: + version (str): The version of the JupyterValidateNotebookTool. + parameters (List[Parameter]): A list of parameters needed for notebook validation. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterValidateNotebookTool"]): The type identifier for the tool. 
+ """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook", + type="object", + description="A Jupyter NotebookNode object to validate.", + required=True, + ), + ] + ) + name: str = "JupyterValidateNotebookTool" + description: str = "Validates a Jupyter notebook structure against its JSON schema." + type: Literal["JupyterValidateNotebookTool"] = "JupyterValidateNotebookTool" + + def __call__(self, notebook: NotebookNode) -> Dict[str, str]: + logger = logging.getLogger(__name__) + try: + # Explicitly check that the notebook is version 4. + if notebook.get("nbformat") != 4: + raise NotebookValidationError( + f"Invalid nbformat version: {notebook.get('nbformat')}. Expected 4." + ) + nbformat.validate(notebook) + logger.info("Notebook validation succeeded.") + return { + "valid": "True", + "report": "The notebook is valid according to its JSON schema.", + } + except (NotebookValidationError, jsonschema.ValidationError) as e: + logger.error(f"Notebook validation error: {e}") + return {"valid": "False", "report": f"Validation error: {str(e)}"} + except Exception as e: + logger.error(f"An unexpected error occurred during validation: {e}") + return {"valid": "False", "report": f"Unexpected error: {str(e)}"} diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/swarmauri_tool_jupytervalidatenotebook/__init__.py b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/swarmauri_tool_jupytervalidatenotebook/__init__.py new file mode 100644 index 000000000..9ce507a5f --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/swarmauri_tool_jupytervalidatenotebook/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupytervalidatenotebook.JupyterValidateNotebookTool import ( + JupyterValidateNotebookTool, +) + + +__all__ = ["JupyterValidateNotebookTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For 
older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupytervalidatenotebook") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/tests/unit/test_JupyterValidateNotebookTool.py b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/tests/unit/test_JupyterValidateNotebookTool.py new file mode 100644 index 000000000..27cd403a8 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/tests/unit/test_JupyterValidateNotebookTool.py @@ -0,0 +1,104 @@ +""" +test_JupyterValidateNotebookTool.py + +Pytest based unit tests for JupyterValidateNotebookTool from +swarmauri_tool_jupytervalidatenotebook.JupyterValidateNotebookTool. +This file ensures that the tool behaves correctly for valid and invalid notebooks, +and checks various attributes and features of the class. +""" + +import nbformat +from nbformat import NotebookNode +from swarmauri_tool_jupytervalidatenotebook.JupyterValidateNotebookTool import ( + JupyterValidateNotebookTool, +) +from swarmauri_base.tools.ToolBase import ToolBase + + +def test_class_inheritance() -> None: + """ + Test whether JupyterValidateNotebookTool inherits from ToolBase. + """ + tool = JupyterValidateNotebookTool() + assert isinstance(tool, ToolBase), ( + "JupyterValidateNotebookTool should inherit from ToolBase." + ) + + +def test_class_attributes() -> None: + """ + Test the existence and correctness of class attributes. + """ + tool = JupyterValidateNotebookTool() + assert tool.version == "1.0.0", "Version attribute should be '1.0.0'." + assert tool.name == "JupyterValidateNotebookTool", "Name attribute mismatch." + assert ( + tool.description + == "Validates a Jupyter notebook structure against its JSON schema." 
+ ) + assert tool.type == "JupyterValidateNotebookTool", "Type attribute mismatch." + assert len(tool.parameters) == 1, "Should have exactly one parameter definition." + assert tool.parameters[0].name == "notebook", "Parameter name should be 'notebook'." + + +def test_valid_notebook_validation() -> None: + """ + Test the validation process with a valid notebook. Expecting a success response. + """ + # Create a valid minimal notebook + valid_notebook: NotebookNode = nbformat.v4.new_notebook() + valid_notebook["cells"] = [nbformat.v4.new_markdown_cell("Test")] + + tool = JupyterValidateNotebookTool() + result = tool(valid_notebook) + + assert result["valid"] == "True", "Valid notebook should return 'True'." + assert "The notebook is valid" in result["report"], ( + "Report should indicate successful validation." + ) + + +def test_invalid_notebook_validation() -> None: + """ + Test the validation process with an invalid notebook. Expecting a failure response. + """ + # Create a notebook with an invalid nbformat value (it must be 4 for v4 notebooks) + invalid_notebook: NotebookNode = nbformat.v4.new_notebook() + invalid_notebook["nbformat"] = ( + 3 # This violates the schema and should trigger NotebookValidationError + ) + + tool = JupyterValidateNotebookTool() + result = tool(invalid_notebook) + + assert result["valid"] == "False", "Invalid notebook should return 'False'." + assert "Validation error:" in result["report"], ( + "Report should contain the validation error message." + ) + + +def test_unexpected_error_handling(monkeypatch) -> None: + """ + Test how the tool handles unexpected errors during validation. + We monkeypatch nbformat.validate to raise a general exception. 
+ """ + tool = JupyterValidateNotebookTool() + + def fake_validate(notebook, **kwargs): + raise RuntimeError("Unexpected runtime error!") + + # Create the notebook first to avoid affecting nbformat.v4.new_notebook() + notebook: NotebookNode = nbformat.v4.new_notebook() + notebook["cells"] = [nbformat.v4.new_markdown_cell("Test")] + + monkeypatch.setattr(nbformat, "validate", fake_validate) + + result = tool(notebook) + + assert result["valid"] == "False", "Should return 'False' on unexpected error." + assert "Unexpected error:" in result["report"], ( + "Report should indicate an unexpected error." + ) + assert "Unexpected runtime error!" in result["report"], ( + "Should capture the actual error message." + ) diff --git a/pkgs/community/swarmauri_tool_jupytervalidatenotebook/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/tests/unit/test___init__.py new file mode 100644 index 000000000..2c8ffc5fb --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupytervalidatenotebook/tests/unit/test___init__.py @@ -0,0 +1,50 @@ +""" +Unit tests for the swarmauri_tool_jupytervalidatenotebook package initialization. + +This module includes unittest-based test cases for verifying the initialization of +the swarmauri_tool_jupytervalidatenotebook package. It ensures that the __init__.py +file correctly exposes the JupyterValidateNotebookTool class and the __version__ attribute. +""" + +import unittest +from swarmauri_tool_jupytervalidatenotebook import ( + JupyterValidateNotebookTool, + __version__, +) + + +class TestInit(unittest.TestCase): + """ + Contains test cases for the package initialization. + Ensures the __init__.py file correctly exposes the JupyterValidateNotebookTool class. + """ + + def test_import_tool(self) -> None: + """ + Test that the JupyterValidateNotebookTool is properly importable from the package. + """ + # Check that the imported object is indeed a class. 
+ self.assertTrue( + callable(JupyterValidateNotebookTool), + "JupyterValidateNotebookTool should be callable", + ) + + def test_instantiate_tool(self) -> None: + """ + Test that an instance of JupyterValidateNotebookTool can be created. + """ + # Creating an instance to ensure the class is functional. + tool_instance = JupyterValidateNotebookTool() + self.assertIsNotNone( + tool_instance, "Failed to instantiate JupyterValidateNotebookTool" + ) + + def test_version_attribute(self) -> None: + """ + Test that the __version__ attribute is properly exposed and valid. + """ + # Ensure __version__ is a non-empty string. + self.assertIsInstance(__version__, str, "__version__ should be a string") + self.assertGreater( + len(__version__), 0, "__version__ string should not be empty" + ) diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/LICENSE b/pkgs/community/swarmauri_tool_jupyterwritenotebook/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/README.md b/pkgs/community/swarmauri_tool_jupyterwritenotebook/README.md new file mode 100644 index 000000000..38effcd12 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/README.md @@ -0,0 +1,113 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_tool_jupyterwritenotebook +

+ +--- + +# swarmauri_tool_jupyterwritenotebook + +The "swarmauri_tool_jupyterwritenotebook" package provides a tool that writes a Jupyter NotebookNode (or a dictionary structured like a NotebookNode) to a file in JSON format, preserving notebook structure. It comes as a fully functional component ready for integration in Python projects requiring automated notebook generation or manipulation. + +## Installation + +To install this package, make sure you have Python 3.10 or higher, then run: + +• Using pip: + pip install swarmauri_tool_jupyterwritenotebook + +• Using Poetry: + poetry add swarmauri_tool_jupyterwritenotebook + +Once installation is complete, you can import and use the tool directly in your Python code. + +## Usage + +Below is a step-by-step example showing how to use JupyterWriteNotebookTool to write a notebook to disk: + +1. Import the required class: + -------------------------------------------------------------------------------- + from swarmauri_tool_jupyterwritenotebook import JupyterWriteNotebookTool + +2. Initialize the tool: + -------------------------------------------------------------------------------- + tool = JupyterWriteNotebookTool() + +3. Prepare the notebook data as a dictionary. This can be a valid NotebookNode (like what nbformat produces) or a similarly structured Python dict: + -------------------------------------------------------------------------------- + sample_notebook = { + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Hello, Swarmauri!\n", + "This is a sample notebook cell." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 5 + } + +4. 
Call the tool by providing the dictionary (or NotebookNode) and the output file path: + -------------------------------------------------------------------------------- + result = tool( + notebook_data=sample_notebook, + output_file="output_notebook.ipynb", + encoding="utf-8" + ) + print(result) + +If the operation succeeds, "output_notebook.ipynb" will be created and populated with valid notebook JSON content. The returned dictionary may look like: +{ + "message": "Notebook written successfully", + "file_path": "output_notebook.ipynb" +} + +## Comprehensive Examples + +• Basic Notebook Creation: + - You can create a minimal notebook dict with a single Markdown cell and save it. + - The returned information indicates whether the write was successful. + +• Error Handling: + - If an error occurs (e.g., lack of file permissions, invalid notebook data), the tool returns a dictionary containing an "error" key with a descriptive message. + +• Read-Back Verification: + - The tool attempts to reload the created file to ensure it was written correctly. If the read-back fails (e.g., empty or corrupted file), it returns a dictionary with an "error" key. + +## Dependencies + +The primary dependencies for "swarmauri_tool_jupyterwritenotebook" include: + +• Python 3.10 or newer +• nbformat for handling notebook structures +• swarmauri_core and swarmauri_base for core functionality and base class definitions + +You will need these installed in your environment to use this package effectively. Other dev dependencies (such as pytest) are only necessary if you plan to run or extend the existing test suite. + +--- + +© 2025 Swarmauri. Licensed under the Apache-2.0 License.
\ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/payload.json b/pkgs/community/swarmauri_tool_jupyterwritenotebook/payload.json new file mode 100644 index 000000000..0e7281762 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/payload.json @@ -0,0 +1,26 @@ +{ + "PROJECT_ROOT": "pkgs", + "PACKAGE_DESCRIPTION": "A tool that writes a NotebookNode object to a file in JSON format, preserving the notebook structure.", + "PACKAGE_ROOT": "swarmauri_tool_jupyterwritenotebook", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "JupyterWriteNotebookTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Convert a NotebookNode into JSON format.", + "Write the JSON data to a specified file.", + "Ensure the notebook structure remains intact.", + "Log write operations and handle potential I/O errors.", + "Support different output file encodings.", + "Confirm successful file creation and integrity." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { + "name": "nbformat", + "version": "*" + } + ] +} \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/pyproject.toml b/pkgs/community/swarmauri_tool_jupyterwritenotebook/pyproject.toml new file mode 100644 index 000000000..2867f0040 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/pyproject.toml @@ -0,0 +1,57 @@ +[tool.poetry] +name = "swarmauri_tool_jupyterwritenotebook" +version = "0.6.1" +description = "A tool that writes a NotebookNode object to a file in JSON format, preserving the notebook structure."
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/community/swarmauri_tool_jupyterwritenotebook/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + +# Dependencies +nbformat = "^5.10.4" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "test: standard test", + "unit: Unit tests", + "integration: Integration tests", + "i9n: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.tools"] +jupyterwritenotebooktool = "swarmauri_tool_jupyterwritenotebook:JupyterWriteNotebookTool" \ No newline at end of file diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/swarmauri_tool_jupyterwritenotebook/JupyterWriteNotebookTool.py 
b/pkgs/community/swarmauri_tool_jupyterwritenotebook/swarmauri_tool_jupyterwritenotebook/JupyterWriteNotebookTool.py new file mode 100644 index 000000000..c65880fe5 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/swarmauri_tool_jupyterwritenotebook/JupyterWriteNotebookTool.py @@ -0,0 +1,127 @@ +""" +JupyterWriteNotebookTool.py + +This module defines the JupyterWriteNotebookTool, a component that converts a Jupyter notebook +structure (NotebookNode) to JSON format and writes the data to a specified file. It inherits +from the ToolBase class of the swarmauri framework, providing a fully-featured implementation +for writing notebook content to disk. + +The tool validates notebook data, handles potential I/O operations, logs its actions, and +confirms the success of write operations to ensure notebook integrity. +""" + +import json +import logging +from typing import List, Literal, Dict +from pydantic import Field +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + +logger = logging.getLogger(__name__) + + +@ComponentBase.register_type(ToolBase, "JupyterWriteNotebookTool") +class JupyterWriteNotebookTool(ToolBase): + """ + JupyterWriteNotebookTool is responsible for converting a Jupyter NotebookNode + structure into JSON and writing it to disk. It ensures the notebook format + remains valid, including optional read-back verification to confirm the file's + integrity. + + Attributes: + version (str): The version of the JupyterWriteNotebookTool. + parameters (List[Parameter]): A list of parameters required to perform the write operation. + name (str): The name of the tool. + description (str): A brief description of the tool's functionality. + type (Literal["JupyterWriteNotebookTool"]): The type identifier for the tool. 
+ """ + + version: str = "1.0.0" + parameters: List[Parameter] = Field( + default_factory=lambda: [ + Parameter( + name="notebook_data", + type="object", + description="The notebook content as a dictionary/NotebookNode structure.", + required=True, + ), + Parameter( + name="output_file", + type="string", + description="Path to the output file where the notebook JSON will be written.", + required=True, + ), + Parameter( + name="encoding", + type="string", + description="File encoding to use when writing the notebook JSON.", + required=False, + default="utf-8", + ), + ] + ) + name: str = "JupyterWriteNotebookTool" + description: str = "Writes a Jupyter NotebookNode to a file in JSON format." + type: Literal["JupyterWriteNotebookTool"] = "JupyterWriteNotebookTool" + + def __call__( + self, notebook_data: dict, output_file: str, encoding: str = "utf-8" + ) -> Dict[str, str]: + """ + Writes the provided Jupyter notebook data (NotebookNode) to the specified + output file in JSON format. The method uses the selected encoding and + handles potential I/O exceptions. + + Args: + notebook_data (dict): The Jupyter NotebookNode structure represented as a dictionary. + output_file (str): The path to the file where the notebook JSON will be written. + encoding (str, optional): The file encoding to use. Defaults to "utf-8". + + Returns: + Dict[str, str]: A dictionary indicating the success of the operation or an error message. + For example: + { + "message": "Notebook written successfully", + "file_path": "path/to/notebook.ipynb" + } + + Or in case of an error: + { + "error": "An error occurred: " + } + """ + logger.info( + "Attempting to write notebook to file '%s' with encoding '%s'", + output_file, + encoding, + ) + + try: + # Convert the notebook data to JSON text. + json_data = json.dumps(notebook_data, ensure_ascii=False, indent=2) + + # Write the JSON data to file. 
+ with open(output_file, "w", encoding=encoding) as f: + f.write(json_data) + logger.info("Notebook successfully written to '%s'", output_file) + + # Optional read-back check to confirm integrity. + with open(output_file, "r", encoding=encoding) as f: + content = json.load(f) + if not content: + logger.error("Notebook data verification failed: File is empty.") + return { + "error": f"Notebook data verification failed: File '{output_file}' is empty." + } + + logger.info("Notebook data verification successful for '%s'", output_file) + return { + "message": "Notebook written successfully", + "file_path": output_file, + } + + except Exception as e: + error_msg = f"An error occurred during notebook write operation: {str(e)}" + logger.error(error_msg) + return {"error": error_msg} diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/swarmauri_tool_jupyterwritenotebook/__init__.py b/pkgs/community/swarmauri_tool_jupyterwritenotebook/swarmauri_tool_jupyterwritenotebook/__init__.py new file mode 100644 index 000000000..2270f08a0 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/swarmauri_tool_jupyterwritenotebook/__init__.py @@ -0,0 +1,19 @@ +from swarmauri_tool_jupyterwritenotebook.JupyterWriteNotebookTool import ( + JupyterWriteNotebookTool, +) + + +__all__ = ["JupyterWriteNotebookTool"] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("swarmauri_tool_jupyterwritenotebook") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/tests/unit/test_JupyterWriteNotebookTool.py b/pkgs/community/swarmauri_tool_jupyterwritenotebook/tests/unit/test_JupyterWriteNotebookTool.py new file mode 100644 index 
000000000..aaaf7381d --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/tests/unit/test_JupyterWriteNotebookTool.py @@ -0,0 +1,119 @@ +""" +test_JupyterWriteNotebookTool.py + +This module contains pytest-based unit tests for the JupyterWriteNotebookTool class, +ensuring it correctly writes Jupyter notebook data to disk in JSON format and verifies +basic integrity checks. +""" + +import os +import json +import pytest +from typing import Dict, Any +from swarmauri_tool_jupyterwritenotebook.JupyterWriteNotebookTool import ( + JupyterWriteNotebookTool, +) + + +@pytest.fixture +def sample_notebook_data() -> Dict[str, Any]: + """ + Returns a sample structure representing a minimal Jupyter notebook. + """ + return { + "nbformat": 4, + "nbformat_minor": 5, + "metadata": {}, + "cells": [ + {"cell_type": "markdown", "metadata": {}, "source": ["# Sample Notebook"]} + ], + } + + +def test_tool_attributes() -> None: + """ + Tests the basic attributes of JupyterWriteNotebookTool to verify + it initializes with the expected meta-data. + """ + tool = JupyterWriteNotebookTool() + assert tool.name == "JupyterWriteNotebookTool", ( + "Tool name does not match expected value." + ) + assert tool.type == "JupyterWriteNotebookTool", ( + "Tool type does not match expected value." + ) + assert tool.version == "1.0.0", "Tool version does not match expected value." + assert len(tool.parameters) == 3, "Unexpected number of parameters in the tool." + assert tool.parameters[0].name == "notebook_data", ( + "Expected parameter 'notebook_data' missing." + ) + assert tool.parameters[1].name == "output_file", ( + "Expected parameter 'output_file' missing." + ) + + +def test_call_success( + tmp_path: os.PathLike, sample_notebook_data: Dict[str, Any] +) -> None: + """ + Tests that the tool successfully writes notebook data to a file and + verifies its integrity by reading it back. 
+ """ + tool = JupyterWriteNotebookTool() + output_file = tmp_path / "test_notebook.ipynb" + + result = tool( + notebook_data=sample_notebook_data, + output_file=str(output_file), + encoding="utf-8", + ) + + # Verify that the file was created and the returned message indicates success + assert "message" in result, f"Expected success message but got: {result}" + assert "Notebook written successfully" in result["message"], ( + "Success message not found." + ) + assert os.path.exists(output_file), "Output file does not exist after writing." + + # Verify the written content + with open(output_file, "r", encoding="utf-8") as f: + loaded_data = json.load(f) + assert loaded_data["nbformat"] == 4, "Written notebook data has incorrect nbformat." + assert loaded_data["nbformat_minor"] == 5, ( + "Written notebook data has incorrect nbformat_minor." + ) + + +def test_call_empty_notebook_data(tmp_path: os.PathLike) -> None: + """ + Tests that writing an empty notebook data structure still succeeds, + but logs an error if the verification step fails due to empty content. + """ + tool = JupyterWriteNotebookTool() + output_file = tmp_path / "empty_notebook.ipynb" + + empty_data = {} + result = tool( + notebook_data=empty_data, output_file=str(output_file), encoding="utf-8" + ) + + # If verification fails, an error key is returned in the result + assert "error" in result, ( + f"Expected an error for empty notebook data, got: {result}" + ) + + +def test_call_invalid_file_path(sample_notebook_data: Dict[str, Any]) -> None: + """ + Tests that providing an invalid file path results in an error. + """ + tool = JupyterWriteNotebookTool() + # Using an invalid path (e.g., empty string or invalid characters) should trigger an exception + result = tool(notebook_data=sample_notebook_data, output_file="", encoding="utf-8") + + assert "error" in result, ( + "Expected an error for an invalid file path but got a success response. 
+ ) + assert "An error occurred during notebook write operation" in result["error"], ( + "Unexpected error message returned." + ) diff --git a/pkgs/community/swarmauri_tool_jupyterwritenotebook/tests/unit/test___init__.py b/pkgs/community/swarmauri_tool_jupyterwritenotebook/tests/unit/test___init__.py new file mode 100644 index 000000000..5e3dc6286 --- /dev/null +++ b/pkgs/community/swarmauri_tool_jupyterwritenotebook/tests/unit/test___init__.py @@ -0,0 +1,39 @@ +""" +Unit tests for the swarmauri_tool_jupyterwritenotebook package initialization. + +This module tests that the package's __init__.py file correctly exposes its +public API and provides necessary metadata such as version information. +""" + +from swarmauri_tool_jupyterwritenotebook import JupyterWriteNotebookTool, __version__ + + +def test_jupyter_write_notebook_tool_import() -> None: + """ + Test that JupyterWriteNotebookTool can be imported from the package. + Ensures that the class is properly exposed in __init__.py. + """ + assert JupyterWriteNotebookTool is not None, ( + "Expected JupyterWriteNotebookTool to be exposed." + ) + + +def test_jupyter_write_notebook_tool_instantiation() -> None: + """ + Verify that the JupyterWriteNotebookTool can be instantiated + without errors. + """ + tool_instance = JupyterWriteNotebookTool() + assert isinstance(tool_instance, JupyterWriteNotebookTool), ( + "Expected tool_instance to be an instance of JupyterWriteNotebookTool." + ) + + +def test_package_version_existence() -> None: + """ + Check that the __version__ attribute exists and has the correct format. + """ + assert __version__, "Expected __version__ to be defined in package." + assert isinstance(__version__, str), "Expected __version__ to be a string." + # A simple check that version string has at least a major version + assert len(__version__) > 0, "Expected a non-empty version string." 
diff --git a/pkgs/community/swarmauri_tool_lexicaldensity/pyproject.toml b/pkgs/community/swarmauri_tool_lexicaldensity/pyproject.toml index 7af2466fc..eb23c25fe 100644 --- a/pkgs/community/swarmauri_tool_lexicaldensity/pyproject.toml +++ b/pkgs/community/swarmauri_tool_lexicaldensity/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_lexicaldensity" -version = "0.6.1.dev9" +version = "0.6.1" description = "Lexical Density Tool for Swarmauri." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies textstat = "^0.7.4" diff --git a/pkgs/community/swarmauri_tool_psutil/pyproject.toml b/pkgs/community/swarmauri_tool_psutil/pyproject.toml index 0c5243d81..73b3d5417 100644 --- a/pkgs/community/swarmauri_tool_psutil/pyproject.toml +++ b/pkgs/community/swarmauri_tool_psutil/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_psutil" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Psutil Tool." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies psutil = "^6.1.0" diff --git a/pkgs/community/swarmauri_tool_qrcodegenerator/pyproject.toml b/pkgs/community/swarmauri_tool_qrcodegenerator/pyproject.toml index d96cda34a..a6344205a 100644 --- a/pkgs/community/swarmauri_tool_qrcodegenerator/pyproject.toml +++ b/pkgs/community/swarmauri_tool_qrcodegenerator/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_qrcodegenerator" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri QR Code Generator Tool." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies qrcode = "^7.3.1" diff --git a/pkgs/community/swarmauri_tool_sentencecomplexity/pyproject.toml b/pkgs/community/swarmauri_tool_sentencecomplexity/pyproject.toml index c56d2ddd6..c4283ddd1 100644 --- a/pkgs/community/swarmauri_tool_sentencecomplexity/pyproject.toml +++ b/pkgs/community/swarmauri_tool_sentencecomplexity/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_sentencecomplexity" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes an example of a First Class Swarmauri Example." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies nltk = "^3.9.1" diff --git a/pkgs/community/swarmauri_tool_sentimentanalysis/pyproject.toml b/pkgs/community/swarmauri_tool_sentimentanalysis/pyproject.toml index 2a1edb30e..3b9770738 100644 --- a/pkgs/community/swarmauri_tool_sentimentanalysis/pyproject.toml +++ b/pkgs/community/swarmauri_tool_sentimentanalysis/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_sentimentanalysis" -version = "0.6.1.dev9" +version = "0.6.1" description = "Sentiment Analysis Tool" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } [tool.poetry.group.dev.dependencies] flake8 = "^7.0" diff --git a/pkgs/community/swarmauri_tool_smogindex/poetry.lock b/pkgs/community/swarmauri_tool_smogindex/poetry.lock index 803adf3d7..fcd7941d4 
100644 --- a/pkgs/community/swarmauri_tool_smogindex/poetry.lock +++ b/pkgs/community/swarmauri_tool_smogindex/poetry.lock @@ -1213,7 +1213,7 @@ develop = false [package.dependencies] pydantic = "^2.0" -swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } [package.source] type = "git" @@ -1265,7 +1265,7 @@ Pillow = ">=8.0,<11.0" pydantic = "^2.9.2" requests = "^2.32.3" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/base"} -swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core" } toml = "^0.10.2" typing_extensions = "*" diff --git a/pkgs/community/swarmauri_tool_smogindex/pyproject.toml b/pkgs/community/swarmauri_tool_smogindex/pyproject.toml index e953c56a4..588fd8789 100644 --- a/pkgs/community/swarmauri_tool_smogindex/pyproject.toml +++ b/pkgs/community/swarmauri_tool_smogindex/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_smogindex" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Smog Index Tool." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies nltk = "^3.9.1" diff --git a/pkgs/community/swarmauri_tool_textlength/pyproject.toml b/pkgs/community/swarmauri_tool_textlength/pyproject.toml index 6cc370374..bbbb4f3cd 100644 --- a/pkgs/community/swarmauri_tool_textlength/pyproject.toml +++ b/pkgs/community/swarmauri_tool_textlength/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_textlength" -version = "0.6.1.dev9" +version = "0.6.1" description = "Text Length Tool for Swarmauri" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies nltk = "^3.9.1" diff --git a/pkgs/community/swarmauri_tool_webscraping/pyproject.toml b/pkgs/community/swarmauri_tool_webscraping/pyproject.toml index 22d03c79c..19b4cd515 100644 --- 
a/pkgs/community/swarmauri_tool_webscraping/pyproject.toml +++ b/pkgs/community/swarmauri_tool_webscraping/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_webscraping" -version = "0.6.1.dev9" +version = "0.6.1" description = "Web Scraping Tool for Swarmauri" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies beautifulsoup4 = "^4.10.0" diff --git a/pkgs/community/swarmauri_tool_zapierhook/pyproject.toml b/pkgs/community/swarmauri_tool_zapierhook/pyproject.toml index be5894dd1..1a88d085a 100644 --- a/pkgs/community/swarmauri_tool_zapierhook/pyproject.toml +++ b/pkgs/community/swarmauri_tool_zapierhook/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_zapierhook" -version = "0.6.1.dev9" +version = "0.6.1" description = "Zapier Hook Tool" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = 
"mono/dev", subdirectory = "pkgs/swarmauri_standard" } [tool.poetry.group.dev.dependencies] flake8 = "^7.0" diff --git a/pkgs/community/swarmauri_toolkit_github/poetry.lock b/pkgs/community/swarmauri_toolkit_github/poetry.lock index 97bffd833..2bd8864cb 100644 --- a/pkgs/community/swarmauri_toolkit_github/poetry.lock +++ b/pkgs/community/swarmauri_toolkit_github/poetry.lock @@ -1251,7 +1251,7 @@ files = [ [[package]] name = "swarmauri-base" -version = "0.6.1.dev6" +version = "0.6.1.dev9" description = "This repository includes base classes and mixins for the Swarmauri framework." optional = false python-versions = ">=3.10,<3.13" @@ -1267,12 +1267,12 @@ swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "0feb55021d56fd776b2a28fe9c3e47ec8fc68bc5" +resolved_reference = "c20c1ca6b38639f02eaa0d65de8992a9df50863f" subdirectory = "pkgs/base" [[package]] name = "swarmauri-core" -version = "0.6.1.dev6" +version = "0.6.1.dev9" description = "This repository includes core interfaces for the Swarmauri framework." optional = false python-versions = ">=3.10,<3.13" @@ -1288,12 +1288,12 @@ pyyaml = "^6.0.2" type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "0feb55021d56fd776b2a28fe9c3e47ec8fc68bc5" +resolved_reference = "c20c1ca6b38639f02eaa0d65de8992a9df50863f" subdirectory = "pkgs/core" [[package]] name = "swarmauri-standard" -version = "0.6.1.dev6" +version = "0.6.1.dev9" description = "This repository includes standard components within the Swarmauri framework." 
optional = false python-versions = ">=3.10,<3.13" @@ -1319,7 +1319,7 @@ typing_extensions = "*" type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "0feb55021d56fd776b2a28fe9c3e47ec8fc68bc5" +resolved_reference = "c20c1ca6b38639f02eaa0d65de8992a9df50863f" subdirectory = "pkgs/standards/swarmauri_standard" [[package]] diff --git a/pkgs/community/swarmauri_toolkit_github/pyproject.toml b/pkgs/community/swarmauri_toolkit_github/pyproject.toml index 9d3401282..cd8fdf1ed 100644 --- a/pkgs/community/swarmauri_toolkit_github/pyproject.toml +++ b/pkgs/community/swarmauri_toolkit_github/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_toolkit_github" -version = "0.6.1.dev9" +version = "0.6.1" description = "Github Toolkit" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies pygithub = "^2.4.0" diff --git a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubBranchTool.py b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubBranchTool.py index 1558d4515..8923c3166 100644 --- a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubBranchTool.py +++ b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubBranchTool.py @@ -42,7 +42,7 @@ class GithubBranchTool(ToolBase): name: str = "GithubBranchTool" description: str = "Interacts 
with GitHub branches using PyGithub." type: Literal["GithubBranchTool"] = "GithubBranchTool" - token: str + api_token: str model_config = ConfigDict(arbitrary_types_allowed=True) def __call__(self, action: str, **kwargs) -> Dict[str, Any]: @@ -64,7 +64,7 @@ def __call__(self, action: str, **kwargs) -> Dict[str, Any]: } if action in action_map: - self._github = Github(self.token) + self._github = Github(self.api_token) return {action: action_map[action](**kwargs)} raise ValueError(f"Action '{action}' is not supported.") diff --git a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubCommitTool.py b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubCommitTool.py index 53f79e9c1..891ad7cb8 100644 --- a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubCommitTool.py +++ b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubCommitTool.py @@ -72,7 +72,7 @@ class GithubCommitTool(ToolBase): "Interacts with GitHub repositories using PyGithub to submit commits." 
) type: Literal["GithubCommitTool"] = "GithubCommitTool" - token: str + api_token: str model_config = ConfigDict(arbitrary_types_allowed=True) def __call__(self, action: str, **kwargs) -> Dict[str, Any]: @@ -94,7 +94,7 @@ def __call__(self, action: str, **kwargs) -> Dict[str, Any]: } if action in action_map: - self._github = Github(self.token) + self._github = Github(self.api_token) return {action: action_map[action](**kwargs)} raise ValueError(f"Action '{action}' is not supported.") diff --git a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubIssueTool.py b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubIssueTool.py index ffb069396..5ae8967c4 100644 --- a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubIssueTool.py +++ b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubIssueTool.py @@ -46,7 +46,7 @@ class GithubIssueTool(ToolBase): name: str = "GithubIssueTool" description: str = "Interacts with GitHub repositories using PyGithub." 
type: Literal["GithubIssueTool"] = "GithubIssueTool" - token: str + api_token: str model_config = ConfigDict(arbitrary_types_allowed=True) def __call__(self, action: str, **kwargs) -> Dict[str, Any]: @@ -69,7 +69,7 @@ def __call__(self, action: str, **kwargs) -> Dict[str, Any]: } if action in action_map: - self._github = Github(self.token) + self._github = Github(self.api_token) return {action: action_map[action](**kwargs)} raise ValueError(f"Action '{action}' is not supported.") diff --git a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubPRTool.py b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubPRTool.py index 09f576273..0b06aa375 100644 --- a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubPRTool.py +++ b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubPRTool.py @@ -58,7 +58,7 @@ class GithubPRTool(ToolBase): name: str = "GithubPRTool" description: str = "Interacts with GitHub repositories using PyGithub." 
type: Literal["GithubPRTool"] = "GithubPRTool" - token: str + api_token: str model_config = ConfigDict(arbitrary_types_allowed=True) def __call__(self, action: str, **kwargs) -> Dict[str, Any]: @@ -81,7 +81,7 @@ def __call__(self, action: str, **kwargs) -> Dict[str, Any]: } if action in action_map: - self._github = Github(self.token) + self._github = Github(self.api_token) return {action: action_map[action](**kwargs)} raise ValueError(f"Action '{action}' is not supported.") diff --git a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubRepoTool.py b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubRepoTool.py index 3ec4aec1f..0c269554d 100644 --- a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubRepoTool.py +++ b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubRepoTool.py @@ -34,7 +34,7 @@ class GithubRepoTool(ToolBase): name: str = "GithubRepoTool" description: str = "Interacts with GitHub repositories using PyGithub." 
type: Literal["GithubRepoTool"] = "GithubRepoTool" - token: str + api_token: str model_config = ConfigDict(arbitrary_types_allowed=True) def __call__(self, action: str, **kwargs) -> Dict[str, Any]: @@ -57,7 +57,7 @@ def __call__(self, action: str, **kwargs) -> Dict[str, Any]: } if action in action_map: - self._github = Github(self.token) + self._github = Github(self.api_token) return {action: action_map[action](**kwargs)} raise ValueError(f"Action '{action}' is not supported.") diff --git a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubToolkit.py b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubToolkit.py index 9d0e4a394..5d9d8c8a5 100644 --- a/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubToolkit.py +++ b/pkgs/community/swarmauri_toolkit_github/swarmauri_toolkit_github/GithubToolkit.py @@ -29,15 +29,15 @@ def __init__(self, api_token: str, **kwargs): super().__init__(**kwargs) if not api_token: - raise ValueError("Invalid Token or Missing token") + raise ValueError("Invalid Token or Missing api_token") self.api_token = api_token - self.github_repo_tool = GithubRepoTool(token=self.api_token) - self.github_issue_tool = GithubIssueTool(token=self.api_token) - self.github_pr_tool = GithubPRTool(token=self.token) - self.github_branch_tool = GithubBranchTool(token=self.api_token) - self.github_commit_tool = GithubCommitTool(token=self.api_token) + self.github_repo_tool = GithubRepoTool(api_token=self.api_token) + self.github_issue_tool = GithubIssueTool(api_token=self.api_token) + self.github_pr_tool = GithubPRTool(api_token=self.api_token) + self.github_branch_tool = GithubBranchTool(api_token=self.api_token) + self.github_commit_tool = GithubCommitTool(api_token=self.api_token) self.add_tool(self.github_repo_tool) self.add_tool(self.github_issue_tool) diff --git a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubBranchTool_test.py 
b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubBranchTool_test.py index ff336404a..60dc3ac1f 100644 --- a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubBranchTool_test.py +++ b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubBranchTool_test.py @@ -11,19 +11,19 @@ load_dotenv() -# Fixture for retrieving GitHub token and skipping tests if not available +# Fixture for retrieving GitHub api_token and skipping tests if not available @pytest.fixture(scope="module") -def github_token(): - token = os.getenv("GITHUBTOOL_TEST_TOKEN") - if not token: +def github_api_token(): + api_token = os.getenv("GITHUBTOOL_TEST_TOKEN") + if not api_token: pytest.skip("Skipping due to GITHUBTOOL_TEST_TOKEN not set") - return token + return api_token # Fixture for initializing the GithubBranchTool @pytest.fixture(scope="module") -def github_branch_tool(github_token): - return Tool(token=github_token) +def github_branch_tool(github_api_token): + return Tool(api_token=github_api_token) @pytest.mark.unit diff --git a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubCommitTool_test.py b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubCommitTool_test.py index 5897a665a..bce161e67 100644 --- a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubCommitTool_test.py +++ b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubCommitTool_test.py @@ -10,19 +10,19 @@ load_dotenv() -# Fixture for retrieving GitHub token and skipping tests if not available +# Fixture for retrieving GitHub api_token and skipping tests if not available @pytest.fixture(scope="module") -def github_token(): - token = os.getenv("GITHUBTOOL_TEST_TOKEN") - if not token: +def github_api_token(): + api_token = os.getenv("GITHUBTOOL_TEST_TOKEN") + if not api_token: pytest.skip("Skipping due to GITHUBTOOL_TEST_TOKEN not set") - return token + return api_token # Fixture for initializing the GithubCommitTool @pytest.fixture(scope="module") -def github_commit_tool(github_token): - 
return Tool(token=github_token) +def github_commit_tool(github_api_token): + return Tool(api_token=github_api_token) @pytest.mark.unit diff --git a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubIssueTool_test.py b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubIssueTool_test.py index d4304e949..9d60bb469 100644 --- a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubIssueTool_test.py +++ b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubIssueTool_test.py @@ -10,19 +10,19 @@ load_dotenv() -# Fixture for retrieving GitHub token and skipping tests if not available +# Fixture for retrieving GitHub api_token and skipping tests if not available @pytest.fixture(scope="module") -def github_token(): - token = os.getenv("GITHUBTOOL_TEST_TOKEN") - if not token: +def github_api_token(): + api_token = os.getenv("GITHUBTOOL_TEST_TOKEN") + if not api_token: pytest.skip("Skipping due to GITHUBTOOL_TEST_TOKEN not set") - return token + return api_token # Fixture for initializing the GithubIssueTool @pytest.fixture(scope="module") -def github_issue_tool(github_token): - return Tool(token=github_token) +def github_issue_tool(github_api_token): + return Tool(api_token=github_api_token) @pytest.mark.unit diff --git a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubPRTool_test.py b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubPRTool_test.py index 2ef5af4db..7f35989c4 100644 --- a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubPRTool_test.py +++ b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubPRTool_test.py @@ -10,19 +10,19 @@ load_dotenv() -# Fixture for retrieving GitHub token and skipping tests if not available +# Fixture for retrieving GitHub api_token and skipping tests if not available @pytest.fixture(scope="module") -def github_token(): - token = os.getenv("GITHUBTOOL_TEST_TOKEN") - if not token: +def github_api_token(): + api_token = os.getenv("GITHUBTOOL_TEST_TOKEN") + if not api_token: 
pytest.skip("Skipping due to GITHUBTOOL_TEST_TOKEN not set") - return token + return api_token # Fixture for initializing the GithubPRTool @pytest.fixture(scope="module") -def github_pr_tool(github_token): - return Tool(token=github_token) +def github_pr_tool(github_api_token): + return Tool(api_token=github_api_token) @pytest.mark.unit diff --git a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubRepoTool_test.py b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubRepoTool_test.py index 9ffc2c281..d40f0df37 100644 --- a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubRepoTool_test.py +++ b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubRepoTool_test.py @@ -10,19 +10,19 @@ load_dotenv() -# Fixture for retrieving GitHub token and skipping tests if not available +# Fixture for retrieving GitHub api_token and skipping tests if not available @pytest.fixture(scope="module") -def github_token(): - token = os.getenv("GITHUBTOOL_TEST_TOKEN") - if not token: +def github_api_token(): + api_token = os.getenv("GITHUBTOOL_TEST_TOKEN") + if not api_token: pytest.skip("Skipping due to GITHUBTOOL_TEST_TOKEN not set") - return token + return api_token # Fixture for initializing the GithubRepoTool @pytest.fixture(scope="module") -def github_repo_tool(github_token): - return Tool(token=github_token) +def github_repo_tool(github_api_token): + return Tool(api_token=github_api_token) @pytest.mark.unit diff --git a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubToolkit_test.py b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubToolkit_test.py index 4138fa380..3f9637cac 100644 --- a/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubToolkit_test.py +++ b/pkgs/community/swarmauri_toolkit_github/tests/unit/GithubToolkit_test.py @@ -10,16 +10,16 @@ @pytest.fixture(scope="module") -def github_token(): - token = os.getenv("GITHUBTOOL_TEST_TOKEN") - if not token: +def github_api_token(): + api_token = os.getenv("GITHUBTOOL_TEST_TOKEN") + 
if not api_token: pytest.skip("Skipping due to GITHUBTOOL_TEST_TOKEN not set") - return token + return api_token @pytest.fixture(scope="module") -def github_toolkit(github_token): - return Toolkit(token=github_token) +def github_toolkit(github_api_token): + return Toolkit(api_token=github_api_token) @pytest.mark.unit diff --git a/pkgs/community/swarmauri_toolkit_jupytertoolkit/LICENSE b/pkgs/community/swarmauri_toolkit_jupytertoolkit/LICENSE new file mode 100644 index 000000000..c81176ea0 --- /dev/null +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/pkgs/community/swarmauri_toolkit_jupytertoolkit/PACKAGE_REQUIREMENTS.md b/pkgs/community/swarmauri_toolkit_jupytertoolkit/PACKAGE_REQUIREMENTS.md new file mode 100644 index 000000000..5f79ec1f7 --- /dev/null +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/PACKAGE_REQUIREMENTS.md @@ -0,0 +1,67 @@ +Here is the raw file ready to copy-paste: + +# Jupyter Toolkit Project Requirements +===================================================== + +## Purpose +-------- + +This document outlines the project-wide functional and non-functional requirements for the Jupyter Toolkit project. + +## Description +----------- + +The Jupyter Toolkit project aims to create a unified toolkit for aggregating standalone Jupyter notebook tools. This document provides a comprehensive overview of the requirements for the project. + +## Functional Requirements +------------------------- + +### 1. Toolkit Integration + +* The toolkit shall integrate multiple standalone Jupyter notebook tools into a single, unified interface. +* The toolkit shall provide a mechanism for adding and removing tools from the interface. + +### 2. Tool Management + +* The toolkit shall provide a centralized management system for all integrated tools. +* The toolkit shall allow users to configure and customize tool settings. + +### 3. 
User Interface + +* The toolkit shall provide an intuitive and user-friendly interface for accessing and using integrated tools. +* The toolkit shall support multiple interface themes and layouts. + +### 4. Nested Tools + +* The toolkit shall allow tools to be nested within each other. +* The toolkit shall provide a mechanism for managing and navigating nested tools. + +## Non-Functional Requirements +------------------------------ + +### 1. Performance + +* The toolkit shall respond to user input within 2 seconds. +* The toolkit shall be able to handle a minimum of 10 concurrent users. + +### 2. Security + +* The toolkit shall implement authentication and authorization mechanisms to ensure secure access to integrated tools. +* The toolkit shall encrypt all data transmitted between the client and server. + +### 3. Usability + +* The toolkit shall provide clear and concise documentation for users and developers. +* The toolkit shall follow accessibility guidelines to ensure usability for users with disabilities. + +## Constraints +-------------- + +* The toolkit shall be built using Python and Jupyter Notebook technologies. +* The toolkit shall be compatible with multiple operating systems, including Windows, macOS, and Linux. + +## Assumptions and Dependencies +------------------------------ + +* The toolkit assumes that all integrated tools are compatible with the Jupyter Notebook framework. +* The toolkit depends on the Jupyter Notebook framework and its dependencies. \ No newline at end of file diff --git a/pkgs/community/swarmauri_toolkit_jupytertoolkit/README.md b/pkgs/community/swarmauri_toolkit_jupytertoolkit/README.md new file mode 100644 index 000000000..8e61d84e1 --- /dev/null +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/README.md @@ -0,0 +1,81 @@ +![Swamauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - swarmauri_toolkit_jupytertoolkit +

+ +--- + +# `swarmauri_toolkit_jupytertoolkit` + +A unified toolkit for aggregating standalone jupyter notebook tools. + +## Installation + +To install `swarmauri_toolkit_jupytertoolkit`, run the following command: + +```bash +pip install swarmauri_toolkit_jupytertoolkit +``` + +## Usage + +To use `swarmauri_toolkit_jupytertoolkit`, you can import it into your Python script or Jupyter Notebook: + +```python +from swarmauri_toolkit_jupytertoolkit import JupyterToolkit +``` + +You can then create an instance of the `JupyterToolkit` class: + +```python +toolkit = JupyterToolkit() +``` + +The `JupyterToolkit` class provides a centralized management system for all integrated tools. You can add and remove tools from the toolkit using the `add_tool` and `remove_tool` methods: + +```python +toolkit.add_tool("JupyterClearOutputTool") +toolkit.remove_tool("JupyterClearOutputTool") +``` + +You can also configure and customize tool settings using the `configure_tool` method: + +```python +toolkit.configure_tool("JupyterClearOutputTool", {"option": "value"}) +``` + +The `JupyterToolkit` class also provides an intuitive and user-friendly interface for accessing and using integrated tools. You can access the interface by calling the `display` method: + +```python +toolkit.display() +``` + +This will display the toolkit interface in the Jupyter Notebook. + +## Contributing + +To contribute to `swarmauri_toolkit_jupytertoolkit`, please fork the repository and submit a pull request. Please ensure that your contributions adhere to the project's coding standards and guidelines. + +## License + +`swarmauri_toolkit_jupytertoolkit` is licensed under the Apache License 2.0. Please see the LICENSE file for more information. + +## Authors + +* Jacob Stewart + +## Acknowledgments + +* The `swarmauri_toolkit_jupytertoolkit` project was made possible by the contributions of the Swarmauri community. +* The project is built on top of the Jupyter Notebook framework and its dependencies. 
\ No newline at end of file diff --git a/pkgs/community/swarmauri_toolkit_jupytertoolkit/pyproject.toml b/pkgs/community/swarmauri_toolkit_jupytertoolkit/pyproject.toml new file mode 100644 index 000000000..aa61f2329 --- /dev/null +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/pyproject.toml @@ -0,0 +1,83 @@ +[tool.poetry] +name = "swarmauri_toolkit_jupytertoolkit" +version = "0.1.0" +description = "A unified toolkit for aggregating standalone jupyter notebook tools." +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs//swarmauri_toolkit_jupytertoolkit/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] +keywords = [ + "jupyter", + "notebook", + "toolkit", + "swarmauri" +] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} + +# Community +swarmuri_tool_jupyterclearoutput = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterclearoutput"} +swarmuri_tool_jupyterdisplay = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterdisplay"} +swarmuri_tool_jupyterdisplayhtml = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterdisplayhtml"} +swarmuri_tool_jupyterexecuteandconvert = 
{ git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexecuteandconvert"} +swarmuri_tool_jupyterexecutecell = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexecutecell"} +swarmuri_tool_jupyterexecutenotebook = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexecutenotebook"} +swarmuri_tool_jupyterexecutenotebookwithparameters = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexecutenotebookwithparameters"} +swarmuri_tool_jupyterexporthtml = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexporthtml"} +swarmuri_tool_jupyterexportlatex = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexportlatex"} +swarmuri_tool_jupyterexportmarkdown = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexportmarkdown"} +swarmuri_tool_jupyterexportpython = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterexportpython"} +swarmuri_tool_jupyterfromdict = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterfromdict"} +swarmuri_tool_jupytergetiopubmessage = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupytergetiopubmessage"} +swarmuri_tool_jupytergetshellmessage = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupytergetshellmessage"} +swarmuri_tool_jupyterreadnotebook = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", 
subdirectory = "pkgs/swarmauri_tool_jupyterreadnotebook"} +swarmuri_tool_jupyterruncell = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterruncell"} +swarmuri_tool_jupytershutdownkernel = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupytershutdownkernel"} +swarmuri_tool_jupyterstartkernel = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterstartkernel"} +swarmuri_tool_jupytervalidatenotebook = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupytervalidatenotebook"} +swarmuri_tool_jupyterwritenotebook = { git = "https://github.com/swarmauri/community.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_tool_jupyterwritenotebook"} + +# Dependencies +pydantic = "*" + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "unit: Unit tests", + "i9n: Integration tests", + "xfail: Expected failures", + "xpass: Expected passes" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.toolkits"] +jupytertoolkit = "swarmauri_toolkit_jupytertoolkit:JupyterToolkit" \ No newline at end of file diff --git a/pkgs/community/swarmauri_toolkit_jupytertoolkit/swarmauri_toolkit_jupytertoolkit/JupyterToolkit.py b/pkgs/community/swarmauri_toolkit_jupytertoolkit/swarmauri_toolkit_jupytertoolkit/JupyterToolkit.py new file mode 100644 index 000000000..62864c3a0 --- 
/dev/null +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/swarmauri_toolkit_jupytertoolkit/JupyterToolkit.py @@ -0,0 +1,84 @@ +from typing import Dict, Literal +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_base.toolkits.ToolkitBase import ToolkitBase +from swarmauri_core.ComponentBase import ComponentBase, SubclassUnion + +from swarmauri_tool_jupyterclearoutput.JupyterClearOutputTool import ( + JupyterClearOutputTool, +) +from swarmauri_tool_jupyterdisplay.JupyterDisplayTool import JupyterDisplayTool +from swarmauri_tool_jupyterdisplayhtml.JupyterDisplayHtmlTool import ( + JupyterDisplayHtmlTool, +) +from swarmauri_tool_jupyterexecuteandconvert.JupyterExecuteAndConvertTool import ( + JupyterExecuteAndConvertTool, +) +from swarmauri_tool_jupyterexecutecell.JupyterExecuteCellTool import ( + JupyterExecuteCellTool, +) +from swarmauri_tool_jupyterexecutenotebook.JupyterExecuteNotebookTool import ( + JupyterExecuteNotebookTool, +) +from swarmauri_tool_jupyterexecutenotebookwithparameters.JupyterExecuteNotebookWithParametersTool import ( + JupyterExecuteNotebookWithParametersTool, +) +from swarmauri_tool_jupyterexporthtml.JupyterExportHtmlTool import JupyterExportHtmlTool +from swarmauri_tool_jupyterexportlatex.JupyterExportLatexTool import ( + JupyterExportLatexTool, +) +from swarmauri_tool_jupyterexportmarkdown.JupyterExportMarkdownTool import ( + JupyterExportMarkdownTool, +) +from swarmauri_tool_jupyterexportpython.JupyterExportPythonTool import ( + JupyterExportPythonTool, +) +from swarmauri_tool_jupyterfromdict.JupyterFromDictTool import JupyterFromDictTool +from swarmauri_tool_jupytergetiopubmessage.JupyterGetIoPubMessageTool import ( + JupyterGetIoPubMessageTool, +) +from swarmauri_tool_jupytergetshellmessage.JupyterGetShellMessageTool import ( + JupyterGetShellMessageTool, +) +from swarmauri_tool_jupyterreadnotebook.JupyterReadNotebookTool import ( + JupyterReadNotebookTool, +) +from swarmauri_tool_jupyterruncell.JupyterRunCellTool 
import JupyterRunCellTool +from swarmauri_tool_jupytershutdownkernel.JupyterShutdownKernelTool import ( + JupyterShutdownKernelTool, +) +from swarmauri_tool_jupyterstartkernel.JupyterStartKernelTool import ( + JupyterStartKernelTool, +) +from swarmauri_tool_jupytervalidatenotebook.JupyterValidateNotebookTool import ( + JupyterValidateNotebookTool, +) +from swarmauri_tool_jupyterwritenotebook.JupyterWriteNotebookTool import ( + JupyterWriteNotebookTool, +) + + +@ComponentBase.register_type(ToolkitBase, "JupyterToolkit") +class JupyterToolkit(ToolkitBase): + type: Literal["JupyterToolkit"] = "JupyterToolkit" + tools: Dict[str, SubclassUnion[ToolBase]] = { + "JupyterClearOutputTool": JupyterClearOutputTool(), + "JupyterDisplayTool": JupyterDisplayTool(), + "JupyterDisplayHtmlTool": JupyterDisplayHtmlTool(), + "JupyterExecuteAndConvertTool": JupyterExecuteAndConvertTool(), + "JupyterExecuteCellTool": JupyterExecuteCellTool(), + "JupyterExecuteNotebookTool": JupyterExecuteNotebookTool(), + "JupyterExecuteNotebookWithParametersTool": JupyterExecuteNotebookWithParametersTool(), + "JupyterExportHtmlTool": JupyterExportHtmlTool(), + "JupyterExportLatexTool": JupyterExportLatexTool(), + "JupyterExportMarkdownTool": JupyterExportMarkdownTool(), + "JupyterExportPythonTool": JupyterExportPythonTool(), + "JupyterFromDictTool": JupyterFromDictTool(), + "JupyterGetIoPubMessageTool": JupyterGetIoPubMessageTool(), + "JupyterGetShellMessageTool": JupyterGetShellMessageTool(), + "JupyterReadNotebookTool": JupyterReadNotebookTool(), + "JupyterRunCellTool": JupyterRunCellTool(), + "JupyterShutdownKernelTool": JupyterShutdownKernelTool(), + "JupyterStartKernelTool": JupyterStartKernelTool(), + "JupyterValidateNotebookTool": JupyterValidateNotebookTool(), + "JupyterWriteNotebookTool": JupyterWriteNotebookTool(), + } diff --git a/pkgs/tooling/monorepo_manager/monorepo_manager/__init__.py b/pkgs/community/swarmauri_toolkit_jupytertoolkit/swarmauri_toolkit_jupytertoolkit/__init__.py 
similarity index 73% rename from pkgs/tooling/monorepo_manager/monorepo_manager/__init__.py rename to pkgs/community/swarmauri_toolkit_jupytertoolkit/swarmauri_toolkit_jupytertoolkit/__init__.py index 429280c47..0056bd5b6 100644 --- a/pkgs/tooling/monorepo_manager/monorepo_manager/__init__.py +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/swarmauri_toolkit_jupytertoolkit/__init__.py @@ -1,4 +1,6 @@ -# monorepo_manager/__init__.py +from .JupyterToolkit import JupyterToolkit + +__all__ = ["JupyterToolkit"] try: # For Python 3.8 and newer @@ -8,7 +10,7 @@ from importlib_metadata import version, PackageNotFoundError try: - __version__ = version("monorepo-manager") + __version__ = version("swarmauri_toolkit_jupytertoolkit") except PackageNotFoundError: # If the package is not installed (for example, during development) __version__ = "0.0.0" diff --git a/pkgs/community/swarmauri_toolkit_jupytertoolkit/tests/i9n/test_i9n__init__.py b/pkgs/community/swarmauri_toolkit_jupytertoolkit/tests/i9n/test_i9n__init__.py new file mode 100644 index 000000000..cf261897b --- /dev/null +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/tests/i9n/test_i9n__init__.py @@ -0,0 +1,26 @@ +import pytest +from swarmauri_toolkit_jupytertoolkit import __version__ + + +@pytest.mark.i9n +class TestI9nInit: + """Tests the package initializer for swarmauri_toolkit_jupytertoolkit.""" + + def test_version(self): + """Ensures that the __version__ is correctly set.""" + assert __version__ != "0.0.0" + + def test_init(self): + """Ensures that the __init__.py loads correctly.""" + try: + from swarmauri_toolkit_jupytertoolkit import ( + JupyterToolkit as JupyterToolkit, + ) + except ImportError: + pytest.fail("Failed to import JupyterToolkit") + + def test_all(self): + """Ensures that the __all__ is correctly set.""" + from swarmauri_toolkit_jupytertoolkit import __all__ + + assert "JupyterToolkit" in __all__ diff --git 
a/pkgs/community/swarmauri_toolkit_jupytertoolkit/tests/unit/test_JupyterToolkit.py b/pkgs/community/swarmauri_toolkit_jupytertoolkit/tests/unit/test_JupyterToolkit.py new file mode 100644 index 000000000..6c703b7a1 --- /dev/null +++ b/pkgs/community/swarmauri_toolkit_jupytertoolkit/tests/unit/test_JupyterToolkit.py @@ -0,0 +1,47 @@ +import pytest +from swarmauri_toolkit_jupytertoolkit.JupyterToolkit import JupyterToolkit + + +@pytest.mark.unit +def test_type(): + """Test the type of JupyterToolkit""" + assert JupyterToolkit.type == "JupyterToolkit" + + +@pytest.mark.unit +def test_tools(): + """Test the tools attribute of JupyterToolkit""" + jupyter_toolkit = JupyterToolkit() + assert isinstance(jupyter_toolkit.tools, dict) + assert len(jupyter_toolkit.tools) > 0 + + +@pytest.mark.unit +def test_tool_instantiation(): + """Test the instantiation of tools in JupyterToolkit""" + jupyter_toolkit = JupyterToolkit() + for tool in jupyter_toolkit.tools.values(): + assert tool is not None + + +@pytest.mark.unit +def test_toolkit_registration(): + """Test the registration of JupyterToolkit as a ToolkitBase subclass""" + assert JupyterToolkit.__bases__[0].__name__ == "ToolkitBase" + + +@pytest.mark.unit +def test_model_dump_json(): + """Test the model_dump_json method of JupyterToolkit""" + jupyter_toolkit = JupyterToolkit() + json_data = jupyter_toolkit.model_dump_json() + assert isinstance(json_data, dict) + + +@pytest.mark.unit +def test_model_validate_json(): + """Test the model_validate_json method of JupyterToolkit""" + jupyter_toolkit = JupyterToolkit() + json_data = jupyter_toolkit.model_dump_json() + validated_json = JupyterToolkit.model_validate_json(json_data) + assert validated_json == json_data diff --git a/pkgs/community/swarmauri_vectorstore_annoy/pyproject.toml b/pkgs/community/swarmauri_vectorstore_annoy/pyproject.toml index 1121e9d76..c89a58105 100644 --- a/pkgs/community/swarmauri_vectorstore_annoy/pyproject.toml +++ 
b/pkgs/community/swarmauri_vectorstore_annoy/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_annoy" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Annoy Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} # Dependencies diff --git a/pkgs/community/swarmauri_vectorstore_cloudweaviate/pyproject.toml b/pkgs/community/swarmauri_vectorstore_cloudweaviate/pyproject.toml index 7bb82ca85..32ecef1dd 100644 --- a/pkgs/community/swarmauri_vectorstore_cloudweaviate/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_cloudweaviate/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_cloudweaviate" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Weaviate Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", 
subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} # Dependencies diff --git a/pkgs/community/swarmauri_vectorstore_duckdb/pyproject.toml b/pkgs/community/swarmauri_vectorstore_duckdb/pyproject.toml index 48ba6a2cd..aeab49366 100644 --- a/pkgs/community/swarmauri_vectorstore_duckdb/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_duckdb/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_duckdb" -version = "0.6.1.dev9" +version = "0.6.1" description = "A DuckDB based Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} # Dependencies diff --git a/pkgs/community/swarmauri_vectorstore_mlm/pyproject.toml b/pkgs/community/swarmauri_vectorstore_mlm/pyproject.toml index dac4f0c1b..ea393341e 100644 --- a/pkgs/community/swarmauri_vectorstore_mlm/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_mlm/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = 
"swarmauri_vectorstore_mlm" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri MLM Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} # Dependencies diff --git a/pkgs/community/swarmauri_vectorstore_neo4j/pyproject.toml b/pkgs/community/swarmauri_vectorstore_neo4j/pyproject.toml index 9d6340d68..7e919b3ec 100644 --- a/pkgs/community/swarmauri_vectorstore_neo4j/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_neo4j/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_neo4j" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Neo4j Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = 
"mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies neo4j = "^5.25.0" diff --git a/pkgs/community/swarmauri_vectorstore_persistentchromadb/pyproject.toml b/pkgs/community/swarmauri_vectorstore_persistentchromadb/pyproject.toml index 46d6a549d..899c4dff4 100644 --- a/pkgs/community/swarmauri_vectorstore_persistentchromadb/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_persistentchromadb/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_persistentchromadb" -version = "0.6.1.dev9" +version = "0.6.1" description = "A Persistent ChromaDB based Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} diff --git a/pkgs/community/swarmauri_vectorstore_pinecone/pyproject.toml b/pkgs/community/swarmauri_vectorstore_pinecone/pyproject.toml index 8fce805fd..5970b1b8e 100644 --- a/pkgs/community/swarmauri_vectorstore_pinecone/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_pinecone/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_pinecone" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Pinecone Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # 
Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} # Dependencies diff --git a/pkgs/community/swarmauri_vectorstore_qdrant/pyproject.toml b/pkgs/community/swarmauri_vectorstore_qdrant/pyproject.toml index 957484d10..ff9d5f363 100644 --- a/pkgs/community/swarmauri_vectorstore_qdrant/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_qdrant/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_qdrant" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Persistent Qdrant Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = 
"pkgs/standards/swarmauri_embedding_doc2vec"} # Dependencies diff --git a/pkgs/community/swarmauri_vectorstore_redis/pyproject.toml b/pkgs/community/swarmauri_vectorstore_redis/pyproject.toml index 2dbb9f73b..87a82a52c 100644 --- a/pkgs/community/swarmauri_vectorstore_redis/pyproject.toml +++ b/pkgs/community/swarmauri_vectorstore_redis/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_redis" -version = "0.6.1.dev9" +version = "0.6.1" description = "Swarmauri Redis Vector Store" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies redis = "^4.0" diff --git a/pkgs/community/swm_example_community_package/pyproject.toml b/pkgs/community/swm_example_community_package/pyproject.toml index 06e9922e7..5c24e3697 100644 --- a/pkgs/community/swm_example_community_package/pyproject.toml +++ b/pkgs/community/swm_example_community_package/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swm_example_community_package" -version = "0.6.1.dev9" +version = "0.6.1" description = "example community package" authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/core/pyproject.toml b/pkgs/core/pyproject.toml index 9c9426644..3d982f6dd 100644 --- a/pkgs/core/pyproject.toml +++ b/pkgs/core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-core" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes core 
interfaces for the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vector_stores/SqliteVectorStore.py b/pkgs/experimental/sqlitevectorstore/SqliteVectorStore.py similarity index 97% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vector_stores/SqliteVectorStore.py rename to pkgs/experimental/sqlitevectorstore/SqliteVectorStore.py index dd8075425..420ef4dca 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/vector_stores/SqliteVectorStore.py +++ b/pkgs/experimental/sqlitevectorstore/SqliteVectorStore.py @@ -13,7 +13,7 @@ VectorStoreSaveLoadMixin, ) from swarmauri_core.ComponentBase import ComponentBase - +from swarmauri_standard.embeddings.TfidfEmbedding import TfidfEmbedding from swarmauri_standard.distances.CosineDistance import CosineDistance from swarmauri_standard.documents.Document import Document from swarmauri_standard.vectors.Vector import Vector @@ -26,9 +26,11 @@ class SqliteVectorStore( type: Literal["SqliteVectorStore"] = "SqliteVectorStore" db_path: str = tempfile.NamedTemporaryFile(suffix=".db", delete=False).name + def __init__(self, db_path: Optional[str] = None, **kwargs): super().__init__(**kwargs) self._distance = CosineDistance() + self._embedder = TfidfEmbedding() self.documents: List[Document] = [] if db_path is not None: self.db_path = db_path diff --git a/pkgs/experimental/sqlitevectorstore/SqliteVectorStore_unit_test.py b/pkgs/experimental/sqlitevectorstore/SqliteVectorStore_unit_test.py new file mode 100644 index 000000000..ef2206071 --- /dev/null +++ b/pkgs/experimental/sqlitevectorstore/SqliteVectorStore_unit_test.py @@ -0,0 +1,36 @@ +import pytest +from swarmauri_standard.documents.Document import Document +from swarmauri_standard.vector_stores.SqliteVectorStore import SqliteVectorStore + + +@pytest.mark.unit +def test_ubc_resource(): + vs = SqliteVectorStore() + assert vs.resource == "VectorStore" + assert 
vs.embedder.resource == "Embedding" + + +@pytest.mark.unit +def test_ubc_type(): + vs = SqliteVectorStore() + assert vs.type == "SqliteVectorStore" + + +@pytest.mark.unit +def test_serialization(): + vs = SqliteVectorStore() + assert vs.id == SqliteVectorStore.model_validate_json(vs.model_dump_json()).id + + +@pytest.mark.unit +def test_top_k(): + vs = SqliteVectorStore() + documents = [ + Document(content="test"), + Document(content="test1"), + Document(content="test2"), + Document(content="test3"), + ] + + vs.add_documents(documents) + assert len(vs.retrieve(query="test", top_k=2)) == 2 diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/prompt_templates/__init__.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/README.md similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/prompt_templates/__init__.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/README.md diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/pyproject.toml b/pkgs/experimental/swarmauri_experimental/ptree_dag/pyproject.toml new file mode 100644 index 000000000..a11cbd098 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/pyproject.toml @@ -0,0 +1,51 @@ +[tool.poetry] +name = "swarmauri_ptree_dag" +version = "0.1.0" +description = "." 
+authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/swarmauri/swarmauri-sdk/pkgs/experimental/ptree_dag/swarmauri_ptree_dag" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] +keywords = [""] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/standards/swarmauri_standard"} +inflect = "*" + +# Third-party dependencies +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +markers = [ + "unit: Unit tests", + "i9n: Integration tests", + "xfail: Expected failures", + "xpass: Expected passes" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/DeepInfraModel.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/DeepInfraModel.py similarity index 100% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/DeepInfraModel.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/DeepInfraModel.py diff --git 
a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/Jinja2PromptTemplate.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/Jinja2PromptTemplate.py similarity index 100% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/Jinja2PromptTemplate.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/Jinja2PromptTemplate.py diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/LlamaCppModel.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/LlamaCppModel.py new file mode 100644 index 000000000..0fb530c40 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/LlamaCppModel.py @@ -0,0 +1,382 @@ +import asyncio +import json +from typing import AsyncIterator, Dict, Iterator, List, Literal, Optional + +import httpx +from pydantic import PrivateAttr, SecretStr +from swarmauri_base.llms.LLMBase import LLMBase +from swarmauri_base.messages.MessageBase import MessageBase +from swarmauri_core.ComponentBase import ComponentBase, SubclassUnion + +from swarmauri_standard.messages.AgentMessage import AgentMessage +from swarmauri_standard.utils.retry_decorator import retry_on_status_codes + + +@ComponentBase.register_type(LLMBase, "LlamaCppModel") +class LlamaCppModel(LLMBase): + """ + A class for interacting with DeepInfra's model API for text generation. + + This implementation uses httpx for both synchronous and asynchronous HTTP requests, + providing support for predictions, streaming responses, and batch processing. 
+ + Attributes: + api_key (str): DeepInfra API key for authentication + Can be obtained from: https://deepinfra.com/dash/api_keys + + allowed_models (List[str]): List of supported model identifiers on DeepInfra + Full list available at: https://deepinfra.com/models/text-generation + + name (str): The currently selected model name + Defaults to "Qwen/Qwen2-72B-Instruct" + + type (Literal["DeepInfraModel"]): Type identifier for the model class + + Link to Allowed Models: https://deepinfra.com/models/text-generation + Link to API KEY: https://deepinfra.com/dash/api_keys + """ + + _BASE_URL: str = PrivateAttr("http://localhost:8080/v1") + _client: httpx.Client = PrivateAttr(default=None) + _async_client: httpx.AsyncClient = PrivateAttr(default=None) + + api_key: Optional[SecretStr] = None + allowed_models: List[str] = [] + + name: str = "" + + type: Literal["LlamaCppModel"] = "LlamaCppModel" + + timeout: float = 1200.0 + + def __init__(self, **data): + """ + Initializes the DeepInfraModel instance with the provided API key + and sets up httpx clients for both sync and async operations. + + Args: + **data: Keyword arguments for model initialization. + """ + super().__init__(**data) + if self.api_key: + headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.api_key.get_secret_value()}", + } + else: + headers = { + "Content-Type": "application/json" + } + self._client = httpx.Client( + headers=headers, base_url=self._BASE_URL, timeout=self.timeout + ) + self._async_client = httpx.AsyncClient( + headers=headers, base_url=self._BASE_URL, timeout=self.timeout + ) + + self.allowed_models = self.allowed_models or self.get_allowed_models() + self.name = self.allowed_models[0] + + def _format_messages( + self, messages: List[SubclassUnion[MessageBase]] + ) -> List[Dict[str, str]]: + """ + Formats conversation history into a list of dictionaries suitable for API requests. 
+ + Args: + messages (List[SubclassUnion[MessageBase]]): The conversation history. + + Returns: + List[Dict[str, str]]: Formatted message list. + """ + message_properties = ["content", "role", "name"] + return [ + message.model_dump(include=message_properties, exclude_none=True) + for message in messages + ] + + def _create_request_payload( + self, + formatted_messages: List[Dict[str, str]], + temperature: float, + max_tokens: int, + enable_json: bool, + stop: List[str] = None, + stream: bool = False, + ) -> Dict: + """ + Creates the payload for the API request. + + Args: + formatted_messages (List[Dict[str, str]]): Formatted messages for the conversation. + temperature (float): Sampling temperature for the response. + max_tokens (int): Maximum number of tokens to generate. + enable_json (bool): Whether to enable JSON response format. + stop (List[str], optional): Stop sequences. + stream (bool): Whether to stream the response. + + Returns: + Dict: Payload for the API request. + """ + payload = { + "model": self.name, + "messages": formatted_messages, + "temperature": temperature, + "max_tokens": max_tokens, + "top_p": 1, + "frequency_penalty": 0, + "presence_penalty": 0, + "stream": stream, + } + + if stop: + payload["stop"] = stop + + if enable_json: + payload["response_format"] = {"type": "json_object"} + + return payload + + @retry_on_status_codes((429, 529), max_retries=1) + def predict( + self, + conversation, + temperature=0.7, + max_tokens=256, + enable_json=False, + stop: List[str] = None, + ): + """ + Sends a synchronous request to generate a response from the model. + + Args: + conversation: The conversation object containing message history. + temperature (float): Sampling temperature for response generation. + max_tokens (int): Maximum number of tokens to generate. + enable_json (bool): Flag for enabling JSON response format. + stop (List[str], optional): Stop sequences for the response. + + Returns: + Updated conversation with the model's response. 
+ """ + formatted_messages = self._format_messages(conversation.history) + payload = self._create_request_payload( + formatted_messages, temperature, max_tokens, enable_json, stop + ) + + response = self._client.post("/chat/completions", json=payload) + response.raise_for_status() + + result = response.json() + message_content = result["choices"][0]["message"]["content"] + conversation.add_message(AgentMessage(content=message_content)) + + return conversation + + @retry_on_status_codes((429, 529), max_retries=1) + async def apredict( + self, + conversation, + temperature=0.7, + max_tokens=256, + enable_json=False, + stop: List[str] = None, + ): + """ + Sends an asynchronous request to generate a response from the model. + + Args: + conversation: The conversation object containing message history. + temperature (float): Sampling temperature for response generation. + max_tokens (int): Maximum number of tokens to generate. + enable_json (bool): Flag for enabling JSON response format. + stop (List[str], optional): Stop sequences for the response. + + Returns: + Updated conversation with the model's response. + """ + formatted_messages = self._format_messages(conversation.history) + payload = self._create_request_payload( + formatted_messages, temperature, max_tokens, enable_json, stop + ) + + response = await self._async_client.post("/chat/completions", json=payload) + response.raise_for_status() + + result = response.json() + message_content = result["choices"][0]["message"]["content"] + conversation.add_message(AgentMessage(content=message_content)) + + return conversation + + @retry_on_status_codes((429, 529), max_retries=1) + def stream( + self, + conversation, + temperature=0.7, + max_tokens=256, + stop: List[str] = None, + ) -> Iterator[str]: + """ + Streams response content from the model synchronously. + + Args: + conversation: The conversation object containing message history. + temperature (float): Sampling temperature for response generation. 
+ max_tokens (int): Maximum number of tokens to generate. + stop (List[str], optional): Stop sequences for the response. + + Yields: + str: Chunks of content from the model's response. + """ + formatted_messages = self._format_messages(conversation.history) + payload = self._create_request_payload( + formatted_messages, temperature, max_tokens, False, stop, stream=True + ) + + with self._client.stream("POST", "/chat/completions", json=payload) as response: + response.raise_for_status() + collected_content = [] + + for line in response.iter_lines(): + # Convert bytes to string if necessary + if isinstance(line, bytes): + line = line.decode("utf-8") + + if line.startswith("data: "): + line = line[6:] # Remove 'data: ' prefix + if line != "[DONE]": + chunk = json.loads(line) + if chunk["choices"][0]["delta"].get("content"): + content = chunk["choices"][0]["delta"]["content"] + collected_content.append(content) + yield content + + full_content = "".join(collected_content) + conversation.add_message(AgentMessage(content=full_content)) + + @retry_on_status_codes((429, 529), max_retries=1) + async def astream( + self, + conversation, + temperature=0.7, + max_tokens=256, + stop: List[str] = None, + ) -> AsyncIterator[str]: + """ + Streams response content from the model asynchronously. + + Args: + conversation: The conversation object containing message history. + temperature (float): Sampling temperature for response generation. + max_tokens (int): Maximum number of tokens to generate. + stop (List[str], optional): Stop sequences for the response. + + Yields: + str: Chunks of content from the model's response. 
+ """ + formatted_messages = self._format_messages(conversation.history) + payload = self._create_request_payload( + formatted_messages, temperature, max_tokens, False, stop, stream=True + ) + + async with self._async_client.stream( + "POST", "/chat/completions", json=payload + ) as response: + response.raise_for_status() + collected_content = [] + + async for line in response.aiter_lines(): + if line.startswith("data: "): + line = line[6:] # Remove 'data: ' prefix + if line != "[DONE]": + chunk = json.loads(line) + if chunk["choices"][0]["delta"].get("content"): + content = chunk["choices"][0]["delta"]["content"] + collected_content.append(content) + yield content + + full_content = "".join(collected_content) + conversation.add_message(AgentMessage(content=full_content)) + + def batch( + self, + conversations: List, + temperature=0.7, + max_tokens=256, + enable_json=False, + stop: List[str] = None, + ) -> List: + """ + Processes multiple conversations in batch synchronously. + + Args: + conversations (List): List of conversation objects. + temperature (float): Sampling temperature for response generation. + max_tokens (int): Maximum number of tokens to generate. + enable_json (bool): Flag for enabling JSON response format. + stop (List[str], optional): Stop sequences for responses. + + Returns: + List: List of updated conversations with model responses. + """ + return [ + self.predict( + conv, + temperature=temperature, + max_tokens=max_tokens, + enable_json=enable_json, + stop=stop, + ) + for conv in conversations + ] + + async def abatch( + self, + conversations: List, + temperature=0.7, + max_tokens=256, + enable_json=False, + stop: List[str] = None, + max_concurrent=5, + ) -> List: + """ + Processes multiple conversations asynchronously, with concurrency control. + + Args: + conversations (List): List of conversation objects. + temperature (float): Sampling temperature for response generation. + max_tokens (int): Maximum number of tokens to generate. 
+ enable_json (bool): Flag for enabling JSON response format. + stop (List[str], optional): Stop sequences for responses. + max_concurrent (int): Maximum number of concurrent tasks. + + Returns: + List: List of updated conversations with model responses. + """ + semaphore = asyncio.Semaphore(max_concurrent) + + async def process_conversation(conv): + async with semaphore: + return await self.apredict( + conv, + temperature=temperature, + max_tokens=max_tokens, + enable_json=enable_json, + stop=stop, + ) + + tasks = [process_conversation(conv) for conv in conversations] + return await asyncio.gather(*tasks) + + def get_allowed_models(self) -> List[str]: + """ + Queries the LLMProvider API endpoint to get the list of allowed models. + + Returns: + List[str]: List of allowed model identifiers. + """ + response = self._client.get("/models") + response.raise_for_status() + models = response.json() + return [model["id"] for model in models] diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/O1Model.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/O1Model.py similarity index 100% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/O1Model.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/O1Model.py diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/core.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/core.py similarity index 100% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/core.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/core.py diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/dependencies.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/dependencies.py similarity index 100% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/dependencies.py rename to 
pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/dependencies.py diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/external.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/external.py similarity index 85% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/external.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/external.py index ad9323421..79841ae58 100644 --- a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/external.py +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/external.py @@ -38,25 +38,34 @@ def call_external_agent(prompt: str, agent_env: Dict[str, str]) -> str: Note: This is a placeholder implementation. Replace this with an actual API call to your LLM provider. """ + print(os.getenv("PROVIDER", agent_env.get("provider", None))) + print(agent_env) from .O1Model import O1Model from .DeepInfraModel import DeepInfraModel + from .LlamaCppModel import LlamaCppModel from swarmauri.agents.RagAgent import RagAgent from swarmauri.vector_stores.TfidfVectorStore import TfidfVectorStore # For demonstration purposes, we simply log the prompt and return a dummy response. truncated_prompt = prompt + "..." if len(prompt) > 100 else prompt print(f"[INFO] Sending prompt to external agent: \n{truncated_prompt}") + llm = DeepInfraModel(api_key=os.getenv("DEEPINFRA_API_KEY"), name="meta-llama/Meta-Llama-3.1-405B-Instruct") - system_context = "You are a helpful assistant." + system_context = "You are a software developer." 
agent = RagAgent(llm=llm, vector_store=TfidfVectorStore(), system_context=system_context) - if os.getenv("PROVIDER", agent_env.get("provider", "DeepInfra")) != "DeepInfra": + if os.getenv("PROVIDER", agent_env.get("provider", None)) == "Openai": llm = O1Model(api_key=os.getenv("API_KEY"), name=agent_env.get("model_name", "o3-mini")) agent.llm = llm result = agent.exec(prompt, top_k=0) + elif os.getenv("PROVIDER", agent_env.get("provider", None)) == "LlamaCpp": + llm = LlamaCppModel(allowed_models=['localhost'], name="localhost") + agent.llm = llm + result = agent.exec(prompt, top_k=0, llm_kwargs={"max_tokens": 3000}) else: agent.llm.name == agent_env.get("model_name", "meta-llama/Meta-Llama-3.1-405B-Instruct") result = agent.exec(prompt, top_k=0, llm_kwargs={"max_tokens": 3000}) + content = chunk_content(result) del agent diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/graph.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/graph.py similarity index 100% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/graph.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/graph.py diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/processing.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/processing.py similarity index 99% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/processing.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/processing.py index 72e81c1a6..dbca9b4e0 100644 --- a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/processing.py +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/processing.py @@ -110,7 +110,7 @@ def _process_file(file_record: Dict[str, Any], # Determine the agent prompt template. 
agent_prompt_template_name = file_record.get("AGENT_PROMPT_TEMPLATE", "agent_default.j2") agent_prompt_template_path = os.path.join(template_dir, agent_prompt_template_name) - content = _render_generate_template(file_record, context, agent_prompt_template_path) + content = _render_generate_template(file_record, context, agent_prompt_template_path, agent_env) else: print(f"[WARNING] Unknown PROCESS_TYPE '{process_type}' for file '{final_filename}'. Skipping.") return diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/rendering.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/rendering.py similarity index 96% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/rendering.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/rendering.py index 0af6f3bbc..519de6e3d 100644 --- a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/rendering.py +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/rendering.py @@ -96,7 +96,9 @@ def _render_copy_template(file_record: Dict[str, Any], context: Dict[str, Any]) def _render_generate_template(file_record: Dict[str, Any], context: Dict[str, Any], - agent_prompt_template: str) -> str: + agent_prompt_template: str, + agent_env: Dict[str, str] = {}, + ) -> str: """ Renders the agent prompt template for a "GENERATE" operation using the provided context, then calls an external agent (e.g., an LLM) to generate file content. @@ -116,7 +118,7 @@ def _render_generate_template(file_record: Dict[str, Any], # Call the external agent to generate content. # Here we assume a function call_external_agent exists in external.py. 
from .external import call_external_agent - rendered_content = call_external_agent(rendered_prompt, context.get("agent_env", {})) + rendered_content = call_external_agent(rendered_prompt, agent_env) return rendered_content except Exception as e: print(f"[ERROR] Failed to render generate template '{agent_prompt_template}': {e}") diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/agent_default.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/agent_default.j2 new file mode 100644 index 000000000..0e03ae749 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/agent_default.j2 @@ -0,0 +1,65 @@ +I need you to generate a file named "{{ RENDERED_FILE_NAME }}". This file consists of a complete concrete implementation within the "{{ PACKAGE_ROOT }}" package. + +Below are the details and requirements for this task: + +1. **Purpose / Description**: + {{ PURPOSE }} + + {{ DESCRIPTION }} + +2. **Requirements / Constraints**: + {%- for requirement in REQUIREMENTS %} + - {{ requirement }} + {%- endfor %} + + +3. **File and Code Preferences**: + - The code follows PEP 8 style guidelines. + - All functions, classes, and methods include clear docstrings that explain their functionality. + - Include module-level docstrings to explain the purpose of modules. + - Type hints are used throughout the code. + - Critical sections of the code include comments explaining the logic. + - Assume the user will copy-paste this code as-is into their project. + - Provide the concrete fully functional implementation of the code. + - All methods must feature a complete implementation. + + +4. **Behavior**: + - Must inherit the base class. + - Must provide fully functional implementations for all required methods. + +### Desired Output +Produce a single code snippet that contains the entire content for the file {{ RENDERED_FILE_NAME }}. 
+Do not wrap it in triple backticks! Just provide the raw code ready to copy-paste. +Do not provide an explanation afterward. + +{# At the bottom of the template, include any example files specified by the EXAMPLES array #} +{% if EXAMPLES %} +#### +Leverage the reference file below as a guideline to utilize when creating the new component. + +{%- for example in EXAMPLES %} +#### Reference File: `{{ example }}` +``` +{% include example %} + +``` +{%- endfor %} +{% endif %} + +### Example Format of the Output + +{% include FILE_NAME %} + +{# At the bottom of the template, include any dependency files specified by the DEPENDENCIES array #} +{% if RENDERED_DEPENDENCIES %} +### Dependencies +{%- for dependency in RENDERED_DEPENDENCIES %} +#### `{{ dependency }}` +``` +{% include dependency %} + +``` + +{%- endfor %} +{% endif %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/llm-payload.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/llm-payload.json new file mode 100644 index 000000000..7f1c41e35 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/llm-payload.json @@ -0,0 +1,77 @@ +[ + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "PURPOSE": "Implements the new component's main class.", + "DESCRIPTION": "This file defines the primary class, referencing a base class and mixin.", + "REQUIREMENTS": [ + "Ensure the base class and mixin logic is correctly integrated.", + "Ensure that all methods include complete, comprehensive, and full functional implementation logic.", + "Use HTTPX." 
+ ], + "DEPENDENCIES": ["base/swarmauri_base/llms/LLMBase.py"], + "PROCESS_TYPE": "GENERATE", + "BASE_CLASS": "LLMBase", + "MIXINS": [], + "EXAMPLES": ["standards/swarmauri_standard/swarmauri_standard/llms/GroqModel.py"] + + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "PURPOSE": "Initializes the new component's package.", + "DESCRIPTION": "Exposes and imports the main class from {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import the class defined in {{ MODULE_NAME }}.py."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}//swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test_{{ MODULE_NAME }}.py", + "PURPOSE": "Contains pytest based test cases for the new component class.", + "DESCRIPTION": "Verifies the functionality and correctness of the class in {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import and test the class from the main module."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test___init__.py", + "PURPOSE": "Contains pytest based test cases for the package initialization.", + "DESCRIPTION": "Ensures __init__.py correctly exposes the new component class.", + "REQUIREMENTS": ["Should import from the package's __init__.py file."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME 
}}/README.md", + "PURPOSE": "Provides documentation and usage details for the new component.", + "DESCRIPTION": "Includes instructions, examples, and references for {{ MODULE_NAME }}.py", + "REQUIREMENTS": [ + "Include required header fragment.", + "Should mention how to install, configure, and integrate the new component." + ], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE", + "PURPOSE": "Defines the licensing terms for this project.", + "DESCRIPTION": "Contains the open-source (or proprietary) LICENSE text.", + "REQUIREMENTS": [], + "DEPENDENCIES": [], + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}//swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/pyproject.toml", + "PURPOSE": "Specifies Python project metadata and dependencies.", + "DESCRIPTION": "Indicates the project’s name, version, dependencies, and build system.", + "REQUIREMENTS": ["Must reference the main module, README, and LICENSE as applicable."], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/README.md", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE" + ], + "PROCESS_TYPE": "GENERATE" + } +] \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/parsers-projects_payloads.json 
b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/parsers-projects_payloads.json new file mode 100644 index 000000000..133e6d83e --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/parsers-projects_payloads.json @@ -0,0 +1,666 @@ +[ + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Markdown (.md) files, facilitating documentation generation and content management.", + "PACKAGE_ROOT": "swarmauri_parser_markdown", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "MarkdownParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Markdown content and convert it into HTML or other formats.", + "Handle various Markdown syntax variations and extensions.", + "Support integration with documentation generation pipelines.", + "Allow for customization of parsing rules and output formats.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "markdown", "version": "*" }, + { "name": "mistune", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from LaTeX (.tex) files, facilitating scientific document generation and typesetting.", + "PACKAGE_ROOT": "swarmauri_parser_latex", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "LaTeXParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse LaTeX content and extract structural elements like sections, equations, and references.", + "Handle various LaTeX packages and commands.", + "Support conversion to other formats like HTML or PDF.", + "Allow integration with scientific document generation pipelines.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pylatex", "version": "*" }, + { "name": "plasTeX", "version": "*" }, + { "name": "latexcodec", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from PDF (.pdf) files, facilitating document analysis, text extraction, and metadata processing.", + "PACKAGE_ROOT": "swarmauri_parser_pdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "PDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Extract text and metadata from PDF documents.", + "Handle various PDF structures, including scanned documents and embedded images.", + "Support character encoding variations and text extraction accuracy.", + "Allow integration with document processing pipelines and OCR tools.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "PyPDF2", "version": "*" }, + { "name": "pdfminer.six", "version": "*" }, + { "name": "pdfplumber", "version": "*" }, + { "name": "PyMuPDF", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Microsoft Office (.docx, .xlsx, .pptx) files, facilitating document, spreadsheet, and presentation analysis.", + "PACKAGE_ROOT": "swarmauri_parser_microsoftoffice", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "MicrosoftOfficeParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse .docx, .xlsx, and .pptx files to extract text, data, and metadata.", + "Handle various Office document structures and embedded elements.", + "Support integration with document processing and data analysis pipelines.", + "Allow for customization of parsing rules based on document types.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "python-docx", "version": "*" }, + { "name": "openpyxl", "version": "*" }, + { "name": "python-pptx", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from GeoJSON (.geojson) files, facilitating geospatial data analysis and visualization.", + "PACKAGE_ROOT": "swarmauri_parser_geojson", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "GeoJSONParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse GeoJSON files and extract geographic features and properties.", + "Handle various GeoJSON geometry types and coordinate systems.", + "Support integration with geospatial data processing and visualization pipelines.", + "Allow for customization of parsing rules based on feature properties.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "geojson", "version": "*" }, + { "name": "geopandas", "version": "*" }, + { "name": "shapely", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from GeoPackage (.gpkg) files, facilitating geospatial data storage and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_geopackage", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "GeoPackageParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse GeoPackage files and extract geospatial layers and metadata.", + "Handle various GeoPackage feature types and coordinate systems.", + "Support integration with geospatial data processing and analysis pipelines.", + "Allow for customization of parsing rules based on layer properties.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "geopandas", "version": "*" }, + { "name": "GDAL/OGR", "version": "*" }, + { "name": "shapely", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SAS (.sas7bdat) files, facilitating statistical data analysis and migration from SAS to Python.", + "PACKAGE_ROOT": "swarmauri_parser_sas", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SASParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SAS .sas7bdat files and convert them into pandas DataFrames.", + "Handle various SAS data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on SAS file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sas7bdat", "version": "*" }, + { "name": "pyreadstat", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SPSS (.sav) files, facilitating statistical data analysis and migration from SPSS to Python.", + "PACKAGE_ROOT": "swarmauri_parser_spss", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SPSSParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SPSS .sav files and convert them into pandas DataFrames.", + "Handle various SPSS data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on SPSS file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyreadstat", "version": "*" }, + { "name": "savReaderWriter", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Stata (.dta) files, facilitating statistical data analysis and migration from Stata to Python.", + "PACKAGE_ROOT": "swarmauri_parser_stata", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "StataParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Stata .dta files and convert them into pandas DataFrames.", + "Handle various Stata data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on Stata file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyreadstat", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from BSON (.bson) files, facilitating efficient binary data processing and serialization.", + "PACKAGE_ROOT": "swarmauri_parser_bson", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "BSONParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse BSON files and deserialize data into Python objects.", + "Support packing and unpacking of various data types.", + "Handle streaming BSON data for large datasets.", + "Allow integration with binary data processing pipelines.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "bson", "version": "*" }, + { "name": "pymongo", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Parquet (.parquet) files, facilitating efficient columnar data processing and storage.", + "PACKAGE_ROOT": "swarmauri_parser_parquet", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ParquetParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Parquet files and convert them into pandas DataFrames.", + "Handle various Parquet schema versions and data types.", + "Support integration with big data processing and analytics pipelines.", + "Allow for customization based on Parquet file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "fastparquet", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from ORC (.orc) files, facilitating efficient columnar data processing and storage in Hadoop ecosystems.", + "PACKAGE_ROOT": "swarmauri_parser_orc", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ORCParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse ORC files and convert them into pandas DataFrames.", + "Handle various ORC schema versions and data types.", + "Support integration with big data processing and analytics pipelines.", + "Allow for customization based on ORC file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "fastparquet", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Feather (.feather) files, facilitating fast, lightweight data exchange and storage for pandas DataFrames.", + "PACKAGE_ROOT": "swarmauri_parser_feather", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "FeatherParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Feather files and convert them into pandas DataFrames.", + "Handle various Feather schema versions and data types.", + "Support integration with data analysis and machine learning pipelines.", + "Allow for customization based on Feather file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from HDF5 (.h5, .hdf5) files, facilitating high-performance storage and retrieval of large-scale scientific data.", + "PACKAGE_ROOT": "swarmauri_parser_hdf5", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "HDF5Parser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse HDF5 files and convert datasets into Python objects or pandas DataFrames.", + "Handle complex HDF5 group and dataset structures.", + "Support integration with scientific data processing and analysis pipelines.", + "Allow for customization based on HDF5 file structures and metadata.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "h5py", "version": "*" }, + { "name": "PyTables", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from NetCDF (.nc) files, facilitating storage and analysis of array-oriented scientific data.", + "PACKAGE_ROOT": "swarmauri_parser_netcdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "NetCDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse NetCDF files and convert datasets into Python objects or xarray Datasets.", + "Handle various NetCDF dimensions, variables, and metadata.", + "Support integration with scientific data processing and analysis pipelines.", + "Allow for customization based on NetCDF file structures and metadata.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "netCDF4", "version": "*" }, + { "name": "xarray", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SDF (.sdf) files, facilitating molecular structure analysis in cheminformatics applications.", + "PACKAGE_ROOT": "swarmauri_parser_sdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SDF files and extract molecular structures and properties.", + "Handle various SDF formats and embedded data fields.", + "Support integration with cheminformatics and molecular analysis pipelines.", + "Allow for customization based on molecular data structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "RDKit", "version": "*" }, + { "name": "openbabel", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from RDF (.rdf) files, facilitating semantic web data processing and knowledge graph construction.", + "PACKAGE_ROOT": "swarmauri_parser_rdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "RDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse RDF files and extract triples and semantic relationships.", + "Handle various RDF serialization formats (e.g., RDF/XML, Turtle, N-Triples).", + "Support integration with semantic web and knowledge graph pipelines.", + "Allow for customization based on RDF ontologies and vocabularies.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "rdflib", "version": "*" }, + { "name": "PyLD", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SQLite (.sqlite, .db) files, facilitating database management and data analysis.", + "PACKAGE_ROOT": "swarmauri_parser_sqlite", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SQLiteParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Connect to SQLite databases and execute SQL queries to extract data.", + "Handle various SQLite schemas and data types.", + "Support integration with data analysis and processing pipelines.", + "Allow for customization of SQL queries based on data extraction needs.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sqlite3", "version": "*" }, + { "name": "SQLAlchemy", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and execute SQL (.sql) scripts, facilitating database migrations, schema management, and data seeding.", + "PACKAGE_ROOT": "swarmauri_parser_sql", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SQLParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse and execute SQL scripts to create, modify, and populate databases.", + "Handle various SQL dialects and commands.", + "Support integration with database migration and management pipelines.", + "Allow for customization of SQL execution based on environment configurations.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sqlparse", "version": "*" }, + { "name": "SQLAlchemy", "version": "*" }, + { "name": "sqlite3", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from EBML (.ebml) files, facilitating binary data processing and analysis in multimedia applications.", + "PACKAGE_ROOT": "swarmauri_parser_ebml", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "EBMLParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse EBML files and extract binary data elements and metadata.", + "Handle various EBML structures and element types.", + "Support integration with multimedia processing and analysis pipelines.", + "Allow for customization based on EBML schema definitions.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyebml", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Executable Formats (.exe, .dll) files, facilitating binary analysis and reverse engineering.", + "PACKAGE_ROOT": "swarmauri_parser_executable", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ExecutableParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Executable (.exe) and Dynamic Link Library (.dll) files to extract metadata and binary structures.", + "Handle various executable formats and architectures.", + "Support integration with reverse engineering and binary analysis pipelines.", + "Allow for customization based on executable file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pefile", "version": "*" }, + { "name": "lief", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Syslog (.syslog) files, facilitating system monitoring and log analysis.", + "PACKAGE_ROOT": "swarmauri_parser_syslog", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SyslogParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Syslog files and extract log entries with structured data.", + "Handle various Syslog message formats and severity levels.", + "Support integration with system monitoring and log analysis pipelines.", + "Allow for customization based on Syslog message patterns.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyparsing", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Audio files (.mp3, .wav, .flac), facilitating audio metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_audio", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "AudioParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse audio files and extract metadata, such as artist, album, and track information.", + "Handle various audio codecs and formats.", + "Support integration with audio analysis and processing pipelines.", + "Allow for customization based on audio file metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pydub", "version": "*" }, + { "name": "mutagen", "version": "*" }, + { "name": "librosa", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Video files (.mp4, .avi, .mkv), facilitating video metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_video", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "VideoParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse video files and extract metadata, such as codec information, duration, and resolution.", + "Handle various video codecs and formats.", + "Support integration with video analysis and processing pipelines.", + "Allow for customization based on video file metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "opencv-python", "version": "*" }, + { "name": "moviepy", "version": "*" }, + { "name": "ffmpeg-python", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from INI (.ini) files, facilitating application configuration management and data extraction.", + "PACKAGE_ROOT": "swarmauri_parser_ini", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "INIParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse INI files and extract configuration settings and parameters.", + "Handle various INI file structures and sections.", + "Support integration with configuration management and application setup pipelines.", + "Allow for customization based on INI file structures and key-value pairs.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "configparser", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Image files (.png, .jpg, .jpeg, .bmp, .gif, .tiff), facilitating image metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_image", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ImageParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse image files and extract metadata, such as EXIF data, resolution, and color profiles.", + "Handle various image formats and encoding schemes.", + "Support integration with image processing and analysis pipelines.", + "Allow for customization based on image metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "Pillow", "version": "*" }, + { "name": "imageio", "version": "*" }, + { "name": "opencv-python", "version": "*" }, + { "name": "logging", "version": "*" } + ] + } +] diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/payload.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/payload.json new file mode 100644 index 000000000..2f1d4744a --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/payload.json @@ -0,0 +1,79 @@ +[ + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "PURPOSE": "Implements the new component's main class.", + "DESCRIPTION": "This file defines the primary class, referencing a base class and mixin.", + "REQUIREMENTS": [ + "Ensure the base class and mixin logic is correctly integrated.", + "Ensure that all methods include complete, comprehensive, and full functional implementation logic." 
+ ], + "DEPENDENCIES": ["base/swarmauri_base/tools/ToolBase.py"], + "PROCESS_TYPE": "GENERATE", + "BASE_CLASS": "ToolBase", + "MIXINS": [], + "EXAMPLES": ["standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py"] + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "PURPOSE": "Initializes the new component's package.", + "DESCRIPTION": "Exposes and imports the main class from {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import the class defined in {{ MODULE_NAME }}.py."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}//swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test_{{ MODULE_NAME }}.py", + "PURPOSE": "Contains pytest based test cases for the new component class.", + "DESCRIPTION": "Verifies the functionality and correctness of the class in {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import and test the class from the main module."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test___init__.py", + "PURPOSE": "Contains pytest based test cases for the package initialization.", + "DESCRIPTION": "Ensures __init__.py correctly exposes the new component class.", + "REQUIREMENTS": ["Should import from the package's __init__.py file."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ 
PACKAGE_NAME }}/README.rst", + "PURPOSE": "Create comprehensive and appealing documentation, detailing usage of the new component.", + "DESCRIPTION": "Includes branding header, installation and usage instructions, and examples for {{ MODULE_NAME }}.py", + "REQUIREMENTS": [ + "Include all of the HTML header shown in the example output.", + "Should mention how to install and use the new component.", + "Provide an example of usage." + ], + "EXAMPLES": [ + "README.md" + ], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py" + ], + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE", + "PURPOSE": "Defines the licensing terms for this project.", + "DESCRIPTION": "Contains the open-source (or proprietary) LICENSE text.", + "REQUIREMENTS": [], + "DEPENDENCIES": [], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/pyproject.toml", + "PURPOSE": "Specifies Python project metadata and dependencies.", + "DESCRIPTION": "Indicates the project’s name, version, dependencies, and build system.", + "REQUIREMENTS": ["Must reference the main module, README, and LICENSE as applicable."], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/README.rst", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE" + ], + "PROCESS_TYPE": "GENERATE" + } +] \ No newline at end of 
file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/payload.json.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/payload.json.j2 new file mode 100644 index 000000000..ab014417d --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/payload.json.j2 @@ -0,0 +1,114 @@ +[ + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py{% endraw %}", + "PURPOSE": "Implements the new component's main class.", + "DESCRIPTION": "This file defines the concrete class implementation.", + "REQUIREMENTS": [ + "Provide implementation logic for inherited methods when applicable.", + "Ensure that all methods include complete, comprehensive, and fully functional implementation logic." + {% if ADDITIONAL_REQUIREMENTS %} + {% for requirement in ADDITIONAL_REQUIREMENTS %}, + "{{ requirement }}" + {% endfor %} + {% endif %} + ], + "DEPENDENCIES": [ + {% if BASE_CLASS_DEPENDENCY_FILE %} + "{% raw %}{{ BASE_CLASS_DEPENDENCY_FILE }}{% endraw %}" + {% if EXTERNAL_DOC_FILE %} + {% for doc_item in EXTERNAL_DOC_FILE %} + , "{% raw %}{{ doc_item }}{% endraw %}" + {% endfor %} + {% endif %} + {% elif EXTERNAL_DOC_FILE %} + {% for doc_item in EXTERNAL_DOC_FILE %} + "{% raw %}{{ doc_item }}{% endraw %}"{% if not loop.last %}, {% endif %} + {% endfor %} + {% endif %} + ], + "PROCESS_TYPE": "GENERATE", + "BASE_CLASS": "{{ BASE_CLASS_NAME }}", + "MIXINS": [], + "EXAMPLES": [ + {% if CONCRETE_EXAMPLE_FILE %} + "{{ CONCRETE_EXAMPLE_FILE }}" + {% if EXTERNAL_DOC_EXAMPLE_FILE %} + , "{{ EXTERNAL_DOC_EXAMPLE_FILE }}" + {% endif %} + {% elif EXTERNAL_DOC_EXAMPLE_FILE %} + "{{ EXTERNAL_DOC_EXAMPLE_FILE }}" + {% endif %} + ] + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}", + "PURPOSE": "Initializes the new component's package.", + 
"DESCRIPTION": "Exposes and imports the main class from {{ MODULE_NAME }}.py.", + "REQUIREMENTS": [ + "Should import the class defined in {{ MODULE_NAME }}.py." + ], + "DEPENDENCIES": [], + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ MODULE_NAME }}.py{% endraw %}", + "PURPOSE": "Contains pytest based test cases for the new component class.", + "DESCRIPTION": "Verifies the functionality and correctness of the class in {{ MODULE_NAME }}.py.", + "REQUIREMENTS": [ + "Should import and test the class from the main module." + ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test___init__.py{% endraw %}", + "PURPOSE": "Contains pytest based test cases for the package initialization.", + "DESCRIPTION": "Ensures __init__.py correctly exposes the new component class.", + "REQUIREMENTS": [ + "Should import from the package's __init__.py file." + ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md{% endraw %}", + "PURPOSE": "Create a comprehensive README.md, detailing usage of the new component.", + "DESCRIPTION": "Includes branding header, installation and usage instructions, and examples for {{ MODULE_NAME }}.py", + "REQUIREMENTS": [ + "Include all of the HTML header shown in the example output.", + "Based on the dependencies, create comprehensive installation and usage documentation sections.", + "The installation and usage documentation sections should be significantly more complete than the example format provided.", + "Do not include details regarding cloning or forking the repository." 
+ ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE{% endraw %}", + "PURPOSE": "Defines the licensing terms for this project.", + "DESCRIPTION": "Contains the open-source (or proprietary) LICENSE text.", + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml{% endraw %}", + "PURPOSE": "Specifies Python project metadata and dependencies.", + "DESCRIPTION": "Indicates the project’s name, version, dependencies, and build system.", + "REQUIREMENTS": [ + "Must reference the main module, README, and LICENSE as applicable." + ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + } +] diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/projects_payloads.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/projects_payloads.json new file mode 100644 index 000000000..b2a0965e5 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/projects_payloads.json @@ -0,0 +1,640 @@ +[ + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Markdown (.md) files, facilitating documentation generation and content management.", + "PACKAGE_ROOT": "swarmauri_parser_markdown", + "RESOURCE_KIND": 
"parser", + "MODULE_NAME": "MarkdownParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Markdown content and convert it into HTML or other formats.", + "Handle various Markdown syntax variations and extensions.", + "Support integration with documentation generation pipelines.", + "Allow for customization of parsing rules and output formats.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." + ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "markdown", "version": "*" }, + { "name": "mistune", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from LaTeX (.tex) files, facilitating scientific document generation and typesetting.", + "PACKAGE_ROOT": "swarmauri_parser_latex", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "LaTeXParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse LaTeX content and extract structural elements like sections, equations, and references.", + "Handle various LaTeX packages and commands.", + "Support conversion to other formats like HTML or PDF.", + "Allow integration with scientific document generation pipelines.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pylatex", "version": "*" }, + { "name": "plasTeX", "version": "*" }, + { "name": "latexcodec", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from PDF (.pdf) files, facilitating document analysis, text extraction, and metadata processing.", + "PACKAGE_ROOT": "swarmauri_parser_pdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "PDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Extract text and metadata from PDF documents.", + "Handle various PDF structures, including scanned documents and embedded images.", + "Support character encoding variations and text extraction accuracy.", + "Allow integration with document processing pipelines and OCR tools.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "PyPDF2", "version": "*" }, + { "name": "pdfminer.six", "version": "*" }, + { "name": "pdfplumber", "version": "*" }, + { "name": "PyMuPDF", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Microsoft Office (.docx, .xlsx, .pptx) files, facilitating document, spreadsheet, and presentation analysis.", + "PACKAGE_ROOT": "swarmauri_parser_microsoftoffice", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "MicrosoftOfficeParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse .docx, .xlsx, and .pptx files to extract text, data, and metadata.", + "Handle various Office document structures and embedded elements.", + "Support integration with document processing and data analysis pipelines.", + "Allow for customization of parsing rules based on document types.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "python-docx", "version": "*" }, + { "name": "openpyxl", "version": "*" }, + { "name": "python-pptx", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from GeoJSON (.geojson) files, facilitating geospatial data analysis and visualization.", + "PACKAGE_ROOT": "swarmauri_parser_geojson", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "GeoJSONParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse GeoJSON files and extract geographic features and properties.", + "Handle various GeoJSON geometry types and coordinate systems.", + "Support integration with geospatial data processing and visualization pipelines.", + "Allow for customization of parsing rules based on feature properties.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "geojson", "version": "*" }, + { "name": "geopandas", "version": "*" }, + { "name": "shapely", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from GeoPackage (.gpkg) files, facilitating geospatial data storage and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_geopackage", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "GeoPackageParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse GeoPackage files and extract geospatial layers and metadata.", + "Handle various GeoPackage feature types and coordinate systems.", + "Support integration with geospatial data processing and analysis pipelines.", + "Allow for customization of parsing rules based on layer properties.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "geopandas", "version": "*" }, + { "name": "GDAL/OGR", "version": "*" }, + { "name": "shapely", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SAS (.sas7bdat) files, facilitating statistical data analysis and migration from SAS to Python.", + "PACKAGE_ROOT": "swarmauri_parser_sas", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SASParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SAS .sas7bdat files and convert them into pandas DataFrames.", + "Handle various SAS data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on SAS file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sas7bdat", "version": "*" }, + { "name": "pyreadstat", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SPSS (.sav) files, facilitating statistical data analysis and migration from SPSS to Python.", + "PACKAGE_ROOT": "swarmauri_parser_spss", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SPSSParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SPSS .sav files and convert them into pandas DataFrames.", + "Handle various SPSS data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on SPSS file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyreadstat", "version": "*" }, + { "name": "savReaderWriter", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Stata (.dta) files, facilitating statistical data analysis and migration from Stata to Python.", + "PACKAGE_ROOT": "swarmauri_parser_stata", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "StataParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Stata .dta files and convert them into pandas DataFrames.", + "Handle various Stata data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on Stata file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyreadstat", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from BSON (.bson) files, facilitating efficient binary data processing and serialization.", + "PACKAGE_ROOT": "swarmauri_parser_bson", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "BSONParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse BSON files and deserialize data into Python objects.", + "Support packing and unpacking of various data types.", + "Handle streaming BSON data for large datasets.", + "Allow integration with binary data processing pipelines.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "bson", "version": "*" }, + { "name": "pymongo", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Parquet (.parquet) files, facilitating efficient columnar data processing and storage.", + "PACKAGE_ROOT": "swarmauri_parser_parquet", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ParquetParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Parquet files and convert them into pandas DataFrames.", + "Handle various Parquet schema versions and data types.", + "Support integration with big data processing and analytics pipelines.", + "Allow for customization based on Parquet file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "fastparquet", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from ORC (.orc) files, facilitating efficient columnar data processing and storage in Hadoop ecosystems.", + "PACKAGE_ROOT": "swarmauri_parser_orc", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ORCParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse ORC files and convert them into pandas DataFrames.", + "Handle various ORC schema versions and data types.", + "Support integration with big data processing and analytics pipelines.", + "Allow for customization based on ORC file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "fastparquet", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Feather (.feather) files, facilitating fast, lightweight data exchange and storage for pandas DataFrames.", + "PACKAGE_ROOT": "swarmauri_parser_feather", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "FeatherParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Feather files and convert them into pandas DataFrames.", + "Handle various Feather schema versions and data types.", + "Support integration with data analysis and machine learning pipelines.", + "Allow for customization based on Feather file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from HDF5 (.h5, .hdf5) files, facilitating high-performance storage and retrieval of large-scale scientific data.", + "PACKAGE_ROOT": "swarmauri_parser_hdf5", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "HDF5Parser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse HDF5 files and convert datasets into Python objects or pandas DataFrames.", + "Handle complex HDF5 group and dataset structures.", + "Support integration with scientific data processing and analysis pipelines.", + "Allow for customization based on HDF5 file structures and metadata.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "h5py", "version": "*" }, + { "name": "PyTables", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from NetCDF (.nc) files, facilitating storage and analysis of array-oriented scientific data.", + "PACKAGE_ROOT": "swarmauri_parser_netcdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "NetCDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse NetCDF files and convert datasets into Python objects or xarray Datasets.", + "Handle various NetCDF dimensions, variables, and metadata.", + "Support integration with scientific data processing and analysis pipelines.", + "Allow for customization based on NetCDF file structures and metadata.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "netCDF4", "version": "*" }, + { "name": "xarray", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SDF (.sdf) files, facilitating molecular structure analysis in cheminformatics applications.", + "PACKAGE_ROOT": "swarmauri_parser_sdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SDF files and extract molecular structures and properties.", + "Handle various SDF formats and embedded data fields.", + "Support integration with cheminformatics and molecular analysis pipelines.", + "Allow for customization based on molecular data structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "RDKit", "version": "*" }, + { "name": "openbabel", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from RDF (.rdf) files, facilitating semantic web data processing and knowledge graph construction.", + "PACKAGE_ROOT": "swarmauri_parser_rdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "RDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse RDF files and extract triples and semantic relationships.", + "Handle various RDF serialization formats (e.g., RDF/XML, Turtle, N-Triples).", + "Support integration with semantic web and knowledge graph pipelines.", + "Allow for customization based on RDF ontologies and vocabularies.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "rdflib", "version": "*" }, + { "name": "PyLD", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SQLite (.sqlite, .db) files, facilitating database management and data analysis.", + "PACKAGE_ROOT": "swarmauri_parser_sqlite", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SQLiteParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Connect to SQLite databases and execute SQL queries to extract data.", + "Handle various SQLite schemas and data types.", + "Support integration with data analysis and processing pipelines.", + "Allow for customization of SQL queries based on data extraction needs.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sqlite3", "version": "*" }, + { "name": "SQLAlchemy", "version": "*" }, + { "name": "pandas", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and execute SQL (.sql) scripts, facilitating database migrations, schema management, and data seeding.", + "PACKAGE_ROOT": "swarmauri_parser_sql", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SQLParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse and execute SQL scripts to create, modify, and populate databases.", + "Handle various SQL dialects and commands.", + "Support integration with database migration and management pipelines.", + "Allow for customization of SQL execution based on environment configurations.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sqlparse", "version": "*" }, + { "name": "SQLAlchemy", "version": "*" }, + { "name": "sqlite3", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from EBML (.ebml) files, facilitating binary data processing and analysis in multimedia applications.", + "PACKAGE_ROOT": "swarmauri_parser_ebml", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "EBMLParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse EBML files and extract binary data elements and metadata.", + "Handle various EBML structures and element types.", + "Support integration with multimedia processing and analysis pipelines.", + "Allow for customization based on EBML schema definitions.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyebml", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Executable Formats (.exe, .dll) files, facilitating binary analysis and reverse engineering.", + "PACKAGE_ROOT": "swarmauri_parser_executable", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ExecutableParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Executable (.exe) and Dynamic Link Library (.dll) files to extract metadata and binary structures.", + "Handle various executable formats and architectures.", + "Support integration with reverse engineering and binary analysis pipelines.", + "Allow for customization based on executable file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pefile", "version": "*" }, + { "name": "lief", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Syslog (.syslog) files, facilitating system monitoring and log analysis.", + "PACKAGE_ROOT": "swarmauri_parser_syslog", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SyslogParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Syslog files and extract log entries with structured data.", + "Handle various Syslog message formats and severity levels.", + "Support integration with system monitoring and log analysis pipelines.", + "Allow for customization based on Syslog message patterns.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyparsing", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Audio files (.mp3, .wav, .flac), facilitating audio metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_audio", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "AudioParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse audio files and extract metadata, such as artist, album, and track information.", + "Handle various audio codecs and formats.", + "Support integration with audio analysis and processing pipelines.", + "Allow for customization based on audio file metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pydub", "version": "*" }, + { "name": "mutagen", "version": "*" }, + { "name": "librosa", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Video files (.mp4, .avi, .mkv), facilitating video metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_video", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "VideoParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse video files and extract metadata, such as codec information, duration, and resolution.", + "Handle various video codecs and formats.", + "Support integration with video analysis and processing pipelines.", + "Allow for customization based on video file metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "opencv-python", "version": "*" }, + { "name": "moviepy", "version": "*" }, + { "name": "ffmpeg-python", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from INI (.ini) files, facilitating application configuration management and data extraction.", + "PACKAGE_ROOT": "swarmauri_parser_ini", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "INIParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse INI files and extract configuration settings and parameters.", + "Handle various INI file structures and sections.", + "Support integration with configuration management and application setup pipelines.", + "Allow for customization based on INI file structures and key-value pairs.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "configparser", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Image files (.png, .jpg, .jpeg, .bmp, .gif, .tiff), facilitating image metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_image", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ImageParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse image files and extract metadata, such as EXIF data, resolution, and color profiles.", + "Handle various image formats and encoding schemes.", + "Support integration with image processing and analysis pipelines.", + "Allow for customization based on image metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "Pillow", "version": "*" }, + { "name": "imageio", "version": "*" }, + { "name": "opencv-python", "version": "*" } + ] + } +] diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/tool-payload.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/tool-payload.json new file mode 100644 index 000000000..be41f88a7 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/tool-payload.json @@ -0,0 +1,25 @@ +{ + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A tool to automate the process of sending desktop notifications based on specific triggers or events, enabling timely alerts and reminders without manual intervention.", + "PACKAGE_ROOT": "swarmauri_tool_notificationhandlertool", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "NotificationHandlerTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Provide options to define triggers or events that will initiate notifications (e.g., time-based, file changes).", + "Allow customization of notification content, including titles, messages, and icons.", + "Include functionality to schedule recurring notifications and manage notification priorities.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the tool." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyautogui", "version": "*" }, + { "name": "plyer", "version": "*" }, + { "name": "schedule", "version": "*" }, + { "name": "logging", "version": "*" } + ] +} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE.j2 new file mode 100644 index 000000000..f35554359 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE.j2 @@ -0,0 +1 @@ +EXAMPLE LICENSE 2 \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md.j2 new file mode 100644 index 000000000..21e46fd32 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md.j2 @@ -0,0 +1,28 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - {{ PACKAGE_ROOT }} +

+ +--- + +# `{{ PACKAGE_ROOT }}` + +## Installation + + + +## Usage + + + diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml.j2 new file mode 100644 index 000000000..d5e0d38f8 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml.j2 @@ -0,0 +1,65 @@ +[tool.poetry] +name = "{{ PACKAGE_ROOT }}" +version = "0.1.0.dev1" +description = "{{ PACKAGE_DESCRIPTION }}" +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT}}/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] +keywords = [] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/standards/swarmauri_standard"} + + +# Dependencies + +{% if THIRD_PARTY_DEPENDENCIES %} +{% for dependency in THIRD_PARTY_DEPENDENCIES %} +{{ dependency.name }} = "{{ dependency.version }}" +{% endfor %} +{% endif %} + + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report 
= "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] + +markers = [ + "test: standard test", + "unit: Unit tests", + "integration: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.{{RESOURCE_KIND}}"] +{{ MODULE_NAME|lower }} = "{{ PACKAGE_ROOT }}:{{ MODULE_NAME }}" diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/stt/__init__.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test___init__.py.j2 similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/stt/__init__.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test___init__.py.j2 diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ MODULE_NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ MODULE_NAME }}.py.j2 new file mode 100644 index 000000000..e182dce05 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ MODULE_NAME }}.py.j2 @@ -0,0 +1,12 @@ +import pytest +from {{ PACKAGE_ROOT }}.{{ MODULE_NAME }} import {{ MODULE_NAME }} + + +def test_resource(): + assert {{ MODULE_NAME }}.resource == "{{ RESOURCE_KIND }}" + +def test_type(): + assert {{ MODULE_NAME
}}.type == "{{ MODULE_NAME }}" + +def test_serialization(): + assert {{ MODULE_NAME }}.id == {{ MODULE_NAME }}.model_validate_json({{ MODULE_NAME }}.model_dump_json()).id \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py.j2 new file mode 100644 index 000000000..ddb938a88 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py.j2 @@ -0,0 +1,18 @@ +from {{ PACKAGE_ROOT }}.{{ MODULE_NAME }} import {{ MODULE_NAME }} + + +__all__ = [ "{{ MODULE_NAME }}" ] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("{{ PACKAGE_ROOT }}") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" + diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py.j2 new file mode 100644 index 000000000..638b22c44 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/component/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py.j2 @@ -0,0 +1,10 @@ +from swarmauri_core.Component import ComponentBase +from typing import Literal + +@ComponentBase.register_type({{ BASE_CLASS }},
"{{MODULE_NAME}}") +class {{ MODULE_NAME }}({% if mixins %}{{ MIXINS|join(', ') }},{% endif %}{{ BASE_CLASS }}, ComponentBase): + type: Literal["{{ MODULE_NAME }}"] = "{{ MODULE_NAME }}" + + + # Methods + \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/agent_default.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/agent_default.j2 new file mode 100644 index 000000000..0765abc9f --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/agent_default.j2 @@ -0,0 +1,65 @@ +I need you to generate a file named "{{ RENDERED_FILE_NAME }}". This file provides part of the implementation for the "{{ PACKAGE_ROOT }}" package. + +Below are the details and requirements for this task: + +1. **Purpose / Description**: + {{ PURPOSE }} + + {{ DESCRIPTION }} + +2. **Requirements / Constraints**: + {%- for requirement in REQUIREMENTS %} + - {{ requirement }} + {%- endfor %} + + +3. **File and Code Preferences**: + - The code follows PEP 8 style guidelines. + - All functions, classes, and methods include clear docstrings that explain their functionality. + - Include module-level docstrings to explain the purpose of modules. + - Type hints are used throughout the code. + - Critical sections of the code include comments explaining the logic. + - Assume the user will copy-paste this code as-is into their project. + - Provide the concrete fully functional implementation of the code. + - All methods must feature a complete implementation. + + +4. **Behavior**: + - Must inherit the base class. + - Must provide fully functional implementations for all required methods. + +### Desired Output +Produce a single code snippet that contains the entire content for the file {{ RENDERED_FILE_NAME }}. +Do not wrap it in triple backticks! Just provide the raw code ready to copy-paste. +Do not provide an explanation afterward. 
+ +{# At the bottom of the template, include any example files specified by the EXAMPLES array #} +{% if EXAMPLES %} +#### +Leverage the reference file below as a guideline to utilize when creating the new component. + +{%- for example in EXAMPLES %} +#### Reference File: `{{ example }}` +``` +{% include example %} + +``` +{%- endfor %} +{% endif %} + +### Example Format of the Output + +{% include FILE_NAME %} + +{# At the bottom of the template, include any dependency files specified by the DEPENDENCIES array #} +{% if RENDERED_DEPENDENCIES %} +### Dependencies +{%- for dependency in RENDERED_DEPENDENCIES %} +#### `{{ dependency }}` +``` +{% include dependency %} + +``` + +{%- endfor %} +{% endif %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/agent_requirements.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/agent_requirements.j2 new file mode 100644 index 000000000..c2028cd1e --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/agent_requirements.j2 @@ -0,0 +1,55 @@ +I need you to generate requirements. This file provides specifications for the "{{ PACKAGE_ROOT }}" package. + +Below are the details and requirements for this task: + +1. **Purpose / Description**: + {{ PURPOSE }} + + {{ DESCRIPTION }} + +2. **Requirements / Constraints**: + {%- for requirement in REQUIREMENTS %} + - {{ requirement }} + {%- endfor %} + + +3. **File Preferences**: + - Use markdown formatting + +4. **Behavior**: + - Professional + - Requirements Driven Focus + +### Desired Output
Do not wrap it in triple backticks! Just provide the raw file ready to copy-paste. + +{# At the bottom of the template, include any example files specified by the EXAMPLES array #} +{% if EXAMPLES %} +#### +Leverage the reference file below as a guideline to utilize when creating the new component.
+ +{%- for example in EXAMPLES %} +#### Reference File: `{{ example }}` +``` +{% include example %} + +``` +{%- endfor %} +{% endif %} + +### Example Format of the Output + +{% include FILE_NAME %} + +{# At the bottom of the template, include any dependency files specified by the DEPENDENCIES array #} +{% if RENDERED_DEPENDENCIES %} +### Dependencies +{%- for dependency in RENDERED_DEPENDENCIES %} +#### `{{ dependency }}` +``` +{% include dependency %} + +``` + +{%- endfor %} +{% endif %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/llm-payload.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/llm-payload.json new file mode 100644 index 000000000..7f1c41e35 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/llm-payload.json @@ -0,0 +1,77 @@ +[ + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "PURPOSE": "Implements the new component's main class.", + "DESCRIPTION": "This file defines the primary class, referencing a base class and mixin.", + "REQUIREMENTS": [ + "Ensure the base class and mixin logic is correctly integrated.", + "Ensure that all methods include complete, comprehensive, and full functional implementation logic.", + "Use HTTPX." 
+ ], + "DEPENDENCIES": ["base/swarmauri_base/llms/LLMBase.py"], + "PROCESS_TYPE": "GENERATE", + "BASE_CLASS": "LLMBase", + "MIXINS": [], + "EXAMPLES": ["standards/swarmauri_standard/swarmauri_standard/llms/GroqModel.py"] + + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "PURPOSE": "Initializes the new component's package.", + "DESCRIPTION": "Exposes and imports the main class from {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import the class defined in {{ MODULE_NAME }}.py."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}//swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test_{{ MODULE_NAME }}.py", + "PURPOSE": "Contains pytest based test cases for the new component class.", + "DESCRIPTION": "Verifies the functionality and correctness of the class in {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import and test the class from the main module."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test___init__.py", + "PURPOSE": "Contains pytest based test cases for the package initialization.", + "DESCRIPTION": "Ensures __init__.py correctly exposes the new component class.", + "REQUIREMENTS": ["Should import from the package's __init__.py file."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME 
}}/README.md", + "PURPOSE": "Provides documentation and usage details for the new component.", + "DESCRIPTION": "Includes instructions, examples, and references for {{ MODULE_NAME }}.py", + "REQUIREMENTS": [ + "Include required header fragment.", + "Should mention how to install, configure, and integrate the new component." + ], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE", + "PURPOSE": "Defines the licensing terms for this project.", + "DESCRIPTION": "Contains the open-source (or proprietary) LICENSE text.", + "REQUIREMENTS": [], + "DEPENDENCIES": [], + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}//swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/pyproject.toml", + "PURPOSE": "Specifies Python project metadata and dependencies.", + "DESCRIPTION": "Indicates the project’s name, version, dependencies, and build system.", + "REQUIREMENTS": ["Must reference the main module, README, and LICENSE as applicable."], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/README.md", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE" + ], + "PROCESS_TYPE": "GENERATE" + } +] \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/parsers-projects_payloads.json 
b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/parsers-projects_payloads.json new file mode 100644 index 000000000..133e6d83e --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/parsers-projects_payloads.json @@ -0,0 +1,666 @@ +[ + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Markdown (.md) files, facilitating documentation generation and content management.", + "PACKAGE_ROOT": "swarmauri_parser_markdown", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "MarkdownParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Markdown content and convert it into HTML or other formats.", + "Handle various Markdown syntax variations and extensions.", + "Support integration with documentation generation pipelines.", + "Allow for customization of parsing rules and output formats.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "markdown", "version": "*" }, + { "name": "mistune", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from LaTeX (.tex) files, facilitating scientific document generation and typesetting.", + "PACKAGE_ROOT": "swarmauri_parser_latex", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "LaTeXParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse LaTeX content and extract structural elements like sections, equations, and references.", + "Handle various LaTeX packages and commands.", + "Support conversion to other formats like HTML or PDF.", + "Allow integration with scientific document generation pipelines.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pylatex", "version": "*" }, + { "name": "plasTeX", "version": "*" }, + { "name": "latexcodec", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from PDF (.pdf) files, facilitating document analysis, text extraction, and metadata processing.", + "PACKAGE_ROOT": "swarmauri_parser_pdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "PDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Extract text and metadata from PDF documents.", + "Handle various PDF structures, including scanned documents and embedded images.", + "Support character encoding variations and text extraction accuracy.", + "Allow integration with document processing pipelines and OCR tools.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "PyPDF2", "version": "*" }, + { "name": "pdfminer.six", "version": "*" }, + { "name": "pdfplumber", "version": "*" }, + { "name": "PyMuPDF", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Microsoft Office (.docx, .xlsx, .pptx) files, facilitating document, spreadsheet, and presentation analysis.", + "PACKAGE_ROOT": "swarmauri_parser_microsoftoffice", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "MicrosoftOfficeParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse .docx, .xlsx, and .pptx files to extract text, data, and metadata.", + "Handle various Office document structures and embedded elements.", + "Support integration with document processing and data analysis pipelines.", + "Allow for customization of parsing rules based on document types.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "python-docx", "version": "*" }, + { "name": "openpyxl", "version": "*" }, + { "name": "python-pptx", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from GeoJSON (.geojson) files, facilitating geospatial data analysis and visualization.", + "PACKAGE_ROOT": "swarmauri_parser_geojson", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "GeoJSONParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse GeoJSON files and extract geographic features and properties.", + "Handle various GeoJSON geometry types and coordinate systems.", + "Support integration with geospatial data processing and visualization pipelines.", + "Allow for customization of parsing rules based on feature properties.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "geojson", "version": "*" }, + { "name": "geopandas", "version": "*" }, + { "name": "shapely", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from GeoPackage (.gpkg) files, facilitating geospatial data storage and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_geopackage", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "GeoPackageParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse GeoPackage files and extract geospatial layers and metadata.", + "Handle various GeoPackage feature types and coordinate systems.", + "Support integration with geospatial data processing and analysis pipelines.", + "Allow for customization of parsing rules based on layer properties.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "geopandas", "version": "*" }, + { "name": "GDAL/OGR", "version": "*" }, + { "name": "shapely", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SAS (.sas7bdat) files, facilitating statistical data analysis and migration from SAS to Python.", + "PACKAGE_ROOT": "swarmauri_parser_sas", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SASParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SAS .sas7bdat files and convert them into pandas DataFrames.", + "Handle various SAS data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on SAS file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sas7bdat", "version": "*" }, + { "name": "pyreadstat", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SPSS (.sav) files, facilitating statistical data analysis and migration from SPSS to Python.", + "PACKAGE_ROOT": "swarmauri_parser_spss", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SPSSParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SPSS .sav files and convert them into pandas DataFrames.", + "Handle various SPSS data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on SPSS file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyreadstat", "version": "*" }, + { "name": "savReaderWriter", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Stata (.dta) files, facilitating statistical data analysis and migration from Stata to Python.", + "PACKAGE_ROOT": "swarmauri_parser_stata", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "StataParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Stata .dta files and convert them into pandas DataFrames.", + "Handle various Stata data types and metadata.", + "Support integration with statistical analysis and data processing pipelines.", + "Allow for customization based on Stata file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyreadstat", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from BSON (.bson) files, facilitating efficient binary data processing and serialization.", + "PACKAGE_ROOT": "swarmauri_parser_bson", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "BSONParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse BSON files and deserialize data into Python objects.", + "Support packing and unpacking of various data types.", + "Handle streaming BSON data for large datasets.", + "Allow integration with binary data processing pipelines.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "bson", "version": "*" }, + { "name": "pymongo", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Parquet (.parquet) files, facilitating efficient columnar data processing and storage.", + "PACKAGE_ROOT": "swarmauri_parser_parquet", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ParquetParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Parquet files and convert them into pandas DataFrames.", + "Handle various Parquet schema versions and data types.", + "Support integration with big data processing and analytics pipelines.", + "Allow for customization based on Parquet file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "fastparquet", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from ORC (.orc) files, facilitating efficient columnar data processing and storage in Hadoop ecosystems.", + "PACKAGE_ROOT": "swarmauri_parser_orc", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ORCParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse ORC files and convert them into pandas DataFrames.", + "Handle various ORC schema versions and data types.", + "Support integration with big data processing and analytics pipelines.", + "Allow for customization based on ORC file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "fastparquet", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Feather (.feather) files, facilitating fast, lightweight data exchange and storage for pandas DataFrames.", + "PACKAGE_ROOT": "swarmauri_parser_feather", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "FeatherParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Feather files and convert them into pandas DataFrames.", + "Handle various Feather schema versions and data types.", + "Support integration with data analysis and machine learning pipelines.", + "Allow for customization based on Feather file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyarrow", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from HDF5 (.h5, .hdf5) files, facilitating high-performance storage and retrieval of large-scale scientific data.", + "PACKAGE_ROOT": "swarmauri_parser_hdf5", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "HDF5Parser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse HDF5 files and convert datasets into Python objects or pandas DataFrames.", + "Handle complex HDF5 group and dataset structures.", + "Support integration with scientific data processing and analysis pipelines.", + "Allow for customization based on HDF5 file structures and metadata.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "h5py", "version": "*" }, + { "name": "PyTables", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from NetCDF (.nc) files, facilitating storage and analysis of array-oriented scientific data.", + "PACKAGE_ROOT": "swarmauri_parser_netcdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "NetCDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse NetCDF files and convert datasets into Python objects or xarray Datasets.", + "Handle various NetCDF dimensions, variables, and metadata.", + "Support integration with scientific data processing and analysis pipelines.", + "Allow for customization based on NetCDF file structures and metadata.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "netCDF4", "version": "*" }, + { "name": "xarray", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SDF (.sdf) files, facilitating molecular structure analysis in cheminformatics applications.", + "PACKAGE_ROOT": "swarmauri_parser_sdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse SDF files and extract molecular structures and properties.", + "Handle various SDF formats and embedded data fields.", + "Support integration with cheminformatics and molecular analysis pipelines.", + "Allow for customization based on molecular data structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "RDKit", "version": "*" }, + { "name": "openbabel", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from RDF (.rdf) files, facilitating semantic web data processing and knowledge graph construction.", + "PACKAGE_ROOT": "swarmauri_parser_rdf", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "RDFParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse RDF files and extract triples and semantic relationships.", + "Handle various RDF serialization formats (e.g., RDF/XML, Turtle, N-Triples).", + "Support integration with semantic web and knowledge graph pipelines.", + "Allow for customization based on RDF ontologies and vocabularies.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "rdflib", "version": "*" }, + { "name": "PyLD", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from SQLite (.sqlite, .db) files, facilitating database management and data analysis.", + "PACKAGE_ROOT": "swarmauri_parser_sqlite", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SQLiteParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Connect to SQLite databases and execute SQL queries to extract data.", + "Handle various SQLite schemas and data types.", + "Support integration with data analysis and processing pipelines.", + "Allow for customization of SQL queries based on data extraction needs.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sqlite3", "version": "*" }, + { "name": "SQLAlchemy", "version": "*" }, + { "name": "pandas", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and execute SQL (.sql) scripts, facilitating database migrations, schema management, and data seeding.", + "PACKAGE_ROOT": "swarmauri_parser_sql", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SQLParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse and execute SQL scripts to create, modify, and populate databases.", + "Handle various SQL dialects and commands.", + "Support integration with database migration and management pipelines.", + "Allow for customization of SQL execution based on environment configurations.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "sqlparse", "version": "*" }, + { "name": "SQLAlchemy", "version": "*" }, + { "name": "sqlite3", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from EBML (.ebml) files, facilitating binary data processing and analysis in multimedia applications.", + "PACKAGE_ROOT": "swarmauri_parser_ebml", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "EBMLParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse EBML files and extract binary data elements and metadata.", + "Handle various EBML structures and element types.", + "Support integration with multimedia processing and analysis pipelines.", + "Allow for customization based on EBML schema definitions.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyebml", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Executable Formats (.exe, .dll) files, facilitating binary analysis and reverse engineering.", + "PACKAGE_ROOT": "swarmauri_parser_executable", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ExecutableParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Executable (.exe) and Dynamic Link Library (.dll) files to extract metadata and binary structures.", + "Handle various executable formats and architectures.", + "Support integration with reverse engineering and binary analysis pipelines.", + "Allow for customization based on executable file structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pefile", "version": "*" }, + { "name": "lief", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Syslog (.syslog) files, facilitating system monitoring and log analysis.", + "PACKAGE_ROOT": "swarmauri_parser_syslog", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "SyslogParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse Syslog files and extract log entries with structured data.", + "Handle various Syslog message formats and severity levels.", + "Support integration with system monitoring and log analysis pipelines.", + "Allow for customization based on Syslog message patterns.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyparsing", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Audio files (.mp3, .wav, .flac), facilitating audio metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_audio", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "AudioParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse audio files and extract metadata, such as artist, album, and track information.", + "Handle various audio codecs and formats.", + "Support integration with audio analysis and processing pipelines.", + "Allow for customization based on audio file metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pydub", "version": "*" }, + { "name": "mutagen", "version": "*" }, + { "name": "librosa", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Video files (.mp4, .avi, .mkv), facilitating video metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_video", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "VideoParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse video files and extract metadata, such as codec information, duration, and resolution.", + "Handle various video codecs and formats.", + "Support integration with video analysis and processing pipelines.", + "Allow for customization based on video file metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "opencv-python", "version": "*" }, + { "name": "moviepy", "version": "*" }, + { "name": "ffmpeg-python", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from INI (.ini) files, facilitating application configuration management and data extraction.", + "PACKAGE_ROOT": "swarmauri_parser_ini", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "INIParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse INI files and extract configuration settings and parameters.", + "Handle various INI file structures and sections.", + "Support integration with configuration management and application setup pipelines.", + "Allow for customization based on INI file structures and key-value pairs.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "configparser", "version": "*" }, + { "name": "logging", "version": "*" } + ] + }, + { + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A parser designed to interpret and extract data from Image files (.png, .jpg, .jpeg, .bmp, .gif, .tiff), facilitating image metadata extraction and analysis.", + "PACKAGE_ROOT": "swarmauri_parser_image", + "RESOURCE_KIND": "parser", + "MODULE_NAME": "ImageParser", + "BASE_CLASS_NAME": "ParserBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/parsers/ParserBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py", + "ADDITIONAL_REQUIREMENTS": [ + "Parse image files and extract metadata, such as EXIF data, resolution, and color profiles.", + "Handle various image formats and encoding schemes.", + "Support integration with image processing and analysis pipelines.", + "Allow for customization based on image metadata structures.", + "Provide a complete implementation of all internal parsing methods.", + "Ensure robust error handling and logging within the parser." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "Pillow", "version": "*" }, + { "name": "imageio", "version": "*" }, + { "name": "opencv-python", "version": "*" }, + { "name": "logging", "version": "*" } + ] + } +] diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/payload.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/payload.json new file mode 100644 index 000000000..2f1d4744a --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/payload.json @@ -0,0 +1,79 @@ +[ + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "PURPOSE": "Implements the new component's main class.", + "DESCRIPTION": "This file defines the primary class, referencing a base class and mixin.", + "REQUIREMENTS": [ + "Ensure the base class and mixin logic is correctly integrated.", + "Ensure that all methods include complete, comprehensive, and full functional implementation logic." 
+ ], + "DEPENDENCIES": ["base/swarmauri_base/tools/ToolBase.py"], + "PROCESS_TYPE": "GENERATE", + "BASE_CLASS": "ToolBase", + "MIXINS": [], + "EXAMPLES": ["standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py"] + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "PURPOSE": "Initializes the new component's package.", + "DESCRIPTION": "Exposes and imports the main class from {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import the class defined in {{ MODULE_NAME }}.py."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}//swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test_{{ MODULE_NAME }}.py", + "PURPOSE": "Contains pytest based test cases for the new component class.", + "DESCRIPTION": "Verifies the functionality and correctness of the class in {{ MODULE_NAME }}.py.", + "REQUIREMENTS": ["Should import and test the class from the main module."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/tests/unit/test___init__.py", + "PURPOSE": "Contains pytest based test cases for the package initialization.", + "DESCRIPTION": "Ensures __init__.py correctly exposes the new component class.", + "REQUIREMENTS": ["Should import from the package's __init__.py file."], + "DEPENDENCIES": ["{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py"], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ 
PACKAGE_NAME }}/README.rst", + "PURPOSE": "Create a comprehensive and appealing documentation, detailing usage of the new component.", + "DESCRIPTION": "Includes branding header, installation and usage instructions, and examples for {{ MODULE_NAME }}.py", + "REQUIREMENTS": [ + "Include all of the HTML header shown in the example output.", + "Should mention how to install and use the new component.", + "Provide an example of usage." + ], + "EXAMPLES": [ + "README.md" + ], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py" + ], + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE", + "PURPOSE": "Defines the licensing terms for this project.", + "DESCRIPTION": "Contains the open-source (or proprietary) LICENSE text.", + "REQUIREMENTS": [], + "DEPENDENCIES": [], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/pyproject.toml", + "PURPOSE": "Specifies Python project metadata and dependencies.", + "DESCRIPTION": "Indicates the project’s name, version, dependencies, and build system.", + "REQUIREMENTS": ["Must reference the main module, README, and LICENSE as applicable."], + "DEPENDENCIES": [ + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/{{ MODULE_NAME }}.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/__init__.py", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/README.rst", + "{{ PROJECT_ROOT }}/swarmauri_{{ RESOURCE_KIND }}_{{ PACKAGE_NAME }}/LICENSE" + ], + "PROCESS_TYPE": "GENERATE" + } +] \ No newline at end of 
file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/payload.json.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/payload.json.j2 new file mode 100644 index 000000000..f492a12d9 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/payload.json.j2 @@ -0,0 +1,164 @@ +[ + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md{% endraw %}", + "PURPOSE": "Defines the functional and non-functional requirements.", + "DESCRIPTION": "This file describes the functional and non-functional requirements of a standalone package containing a single concrete class implementation.", + "REQUIREMENTS": [ + "Provide functional requirements for the package.", + "Provide non-functional requirements for the package." + {% if GLOAL_REQUIREMENTS %} + {% for requirement in GLOAL_REQUIREMENTS %}, + "{{ requirement }}" + {% endfor %} + {% endif %} + ], + "PROCESS_TYPE": "GENERATE", + "AGENT_PROMPT_TEMPLATE": "agent_requirements.j2", + "DEPENDENCIES": [ + {% if BASE_CLASS_DEPENDENCY_FILE %} + "{% raw %}{{ BASE_CLASS_DEPENDENCY_FILE }}{% endraw %}" + {% if EXTERNAL_DOC_FILE %} + {% for doc_item in EXTERNAL_DOC_FILE %} + , "{% raw %}{{ doc_item }}{% endraw %}" + {% endfor %} + {% endif %} + {% elif EXTERNAL_DOC_FILE %} + {% for doc_item in EXTERNAL_DOC_FILE %} + "{% raw %}{{ doc_item }}{% endraw %}"{% if not loop.last %}, {% endif %} + {% endfor %} + {% endif %} + ], + "PROCESS_TYPE": "GENERATE", + "BASE_CLASS": "{{ BASE_CLASS_NAME }}", + "MIXINS": [], + "EXAMPLES": [ + {% if CONCRETE_EXAMPLE_FILE %} + "{{ CONCRETE_EXAMPLE_FILE }}" + {% if EXTERNAL_DOC_EXAMPLE_FILE %} + , "{{ EXTERNAL_DOC_EXAMPLE_FILE }}" + {% endif %} + {% elif EXTERNAL_DOC_EXAMPLE_FILE %} + "{{ EXTERNAL_DOC_EXAMPLE_FILE }}" + {% endif %} + ] + }, +{ + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ 
MODULE_NAME }}.py{% endraw %}", + "PURPOSE": "Implements the new component's main class.", + "DESCRIPTION": "This file defines the concrete class implementation.", + "REQUIREMENTS": [ + "Provide implementation logic for inherited methods when applicable.", + "Ensure that all methods include complete, comprehensive, and fully functional implementation logic." + {% if ADDITIONAL_REQUIREMENTS %} + {% for requirement in ADDITIONAL_REQUIREMENTS %}, + "{{ requirement }}" + {% endfor %} + {% endif %} + ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md{% endraw %}" + {% if BASE_CLASS_DEPENDENCY_FILE %} + ,"{% raw %}{{ BASE_CLASS_DEPENDENCY_FILE }}{% endraw %}" + {% if EXTERNAL_DOC_FILE %} + {% for doc_item in EXTERNAL_DOC_FILE %} + , "{% raw %}{{ doc_item }}{% endraw %}" + {% endfor %} + {% endif %} + {% elif EXTERNAL_DOC_FILE %} + {% for doc_item in EXTERNAL_DOC_FILE %} + ,"{% raw %}{{ doc_item }}{% endraw %}"{% if not loop.last %}, {% endif %} + {% endfor %} + {% endif %} + ], + "PROCESS_TYPE": "GENERATE", + "BASE_CLASS": "{{ BASE_CLASS_NAME }}", + "MIXINS": [], + "EXAMPLES": [ + {% if CONCRETE_EXAMPLE_FILE %} + "{{ CONCRETE_EXAMPLE_FILE }}" + {% if EXTERNAL_DOC_EXAMPLE_FILE %} + , "{{ EXTERNAL_DOC_EXAMPLE_FILE }}" + {% endif %} + {% elif EXTERNAL_DOC_EXAMPLE_FILE %} + "{{ EXTERNAL_DOC_EXAMPLE_FILE }}" + {% endif %} + ] + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}", + "PURPOSE": "Initializes the new component's package.", + "DESCRIPTION": "Exposes and imports the main class from {{ MODULE_NAME }}.py.", + "REQUIREMENTS": [ + "Should import the class defined in {{ MODULE_NAME }}.py." 
+ ], + "DEPENDENCIES": [], + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ MODULE_NAME }}.py{% endraw %}", + "PURPOSE": "Contains pytest based test cases for the new component class.", + "DESCRIPTION": "Verifies the functionality and correctness of the class in {{ MODULE_NAME }}.py.", + "REQUIREMENTS": [ + "Should import and test the class from the main module." + ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test___init__.py{% endraw %}", + "PURPOSE": "Contains pytest based test cases for the package initialization.", + "DESCRIPTION": "Ensures __init__.py correctly exposes the new component class.", + "REQUIREMENTS": [ + "Should import from the package's __init__.py file." + ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md{% endraw %}", + "PURPOSE": "Create a comprehensive README.md, detailing usage of the new component.", + "DESCRIPTION": "Includes branding header, installation and usage instructions, and examples for {{ MODULE_NAME }}.py", + "REQUIREMENTS": [ + "Include all of the HTML header shown in the example output.", + "Based on the dependencies, create comprehensive installation and usage documentation sections.", + "The installation and usage documentation sections should be significantly more complete than the example format provided.", + "Do not include details regarding cloning or forking the repository." 
+ ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE{% endraw %}", + "PURPOSE": "Defines the licensing terms for this project.", + "DESCRIPTION": "Contains the open-source (or proprietary) LICENSE text.", + "PROCESS_TYPE": "COPY" + }, + { + "FILE_NAME": "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml{% endraw %}", + "PURPOSE": "Specifies Python project metadata and dependencies.", + "DESCRIPTION": "Indicates the project’s name, version, dependencies, and build system.", + "REQUIREMENTS": [ + "Must reference the main module, README, and LICENSE as applicable.", + "Add keywords.", + "Add missing third party dependencies." 
+ ], + "DEPENDENCIES": [ + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE{% endraw %}", + "{% raw %}{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md{% endraw %}" + ], + "PROCESS_TYPE": "GENERATE" + } +] diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/projects_payloads.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/projects_payloads.json new file mode 100644 index 000000000..4f72e6ecf --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/projects_payloads.json @@ -0,0 +1,23 @@ +[ + { + "PROJECT_ROOT": "pkgs", + "TEMPLATE_SET": "test", + "PACKAGE_DESCRIPTION": "Starts a performance profiling session to capture CPU, memory, and I/O usage metrics.", + "PACKAGE_ROOT": "swarmauri_tool_profiling_startsession22", + "RESOURCE_KIND": "tools", + "MODULE_NAME": "StartProfilingSessionTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "GLOAL_REQUIREMENTS": [ + "Initialize profiling using libraries such as cProfile or py-spy.", + "Capture CPU, memory, and I/O metrics from the application.", + "Log the start of the profiling session with appropriate timestamps." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "py-spy", "version": "*" } + ] + } +] diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/tool-payload.json b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/tool-payload.json new file mode 100644 index 000000000..be41f88a7 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/tool-payload.json @@ -0,0 +1,25 @@ +{ + "PROJECT_ROOT": "GENERATIONS", + "PACKAGE_DESCRIPTION": "A tool to automate the process of sending desktop notifications based on specific triggers or events, enabling timely alerts and reminders without manual intervention.", + "PACKAGE_ROOT": "swarmauri_tool_notificationhandlertool", + "RESOURCE_KIND": "tool", + "MODULE_NAME": "NotificationHandlerTool", + "BASE_CLASS_NAME": "ToolBase", + "BASE_CLASS_DEPENDENCY_FILE": "base/swarmauri_base/tools/ToolBase.py", + "CONCRETE_EXAMPLE_FILE": "standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py", + "ADDITIONAL_REQUIREMENTS": [ + "Provide options to define triggers or events that will initiate notifications (e.g., time-based, file changes).", + "Allow customization of notification content, including titles, messages, and icons.", + "Include functionality to schedule recurring notifications and manage notification priorities.", + "Provide a complete implementation of all internal methods.", + "Ensure robust error handling and logging within the tool." 
+ ], + "EXTERNAL_DOC_FILE": null, + "EXTERNAL_DOC_EXAMPLE_FILE": null, + "THIRD_PARTY_DEPENDENCIES": [ + { "name": "pyautogui", "version": "*" }, + { "name": "plyer", "version": "*" }, + { "name": "schedule", "version": "*" }, + { "name": "logging", "version": "*" } + ] +} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE.j2 new file mode 100644 index 000000000..f35554359 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/LICENSE.j2 @@ -0,0 +1 @@ +EXAMPLE LICENSE 2 \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md.j2 new file mode 100644 index 000000000..21e46fd32 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/README.md.j2 @@ -0,0 +1,28 @@ +![Swarmauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - {{ PACKAGE_ROOT }} +

+ +--- + +# `{{ PACKAGE_ROOT }}` + +## Installation + + + +## Usage + + + diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/__init__.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md.j2 similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/__init__.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/REQUIREMENTS.md.j2 diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml.j2 new file mode 100644 index 000000000..d5e0d38f8 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/pyproject.toml.j2 @@ -0,0 +1,65 @@ +[tool.poetry] +name = "{{ PACKAGE_ROOT }}" +version = "0.1.0.dev1" +description = "{{ PACKAGE_DESCRIPTION }}" +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT}}/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] +keywords = [] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/base"} 
+swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/standards/swarmauri_standard"} + + +# Dependencies + +{% if THIRD_PARTY_DEPENDENCIES %} +{% for dependency in THIRD_PARTY_DEPENDENCIES %} +{{ dependency.name }} = "{{ dependency.version }}" +{% endfor %} +{% endif %} + + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] + +markers = [ + "test: standard test", + "unit: Unit tests", + "integration: Integration tests", + "acceptance: Acceptance tests", + "experimental: Experimental tests" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +[tool.poetry.plugins."swarmauri.{{RESOURCE_KIND}}"] +{{ MODULE_NAME|lower }} = "{{ PACKAGE_ROOT }}:{{ MODULE_NAME }}" diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tts/__init__.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test___init__.py.j2 similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tts/__init__.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test___init__.py.j2 diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ MODULE_NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ 
MODULE_NAME }}.py.j2 new file mode 100644 index 000000000..e182dce05 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/tests/unit/test_{{ MODULE_NAME }}.py.j2 @@ -0,0 +1,12 @@ +import pytest +from {{ PACKAGE_ROOT }}.{{ MODULE_NAME }} import {{ MODULE_NAME }} + + +def test_resource(): + assert {{ MODULE_NAME }}.resource == {{ RESOURCE_KIND }} + +def test_type(): + assert {{ MODULE_NAME }}.type == {{ MODULE_NAME }} + +def test_serialization(): + assert {{ MODULE_NAME }}.id == {{ MODULE_NAME }}.model_validate_json() \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py.j2 new file mode 100644 index 000000000..ddb938a88 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/__init__.py.j2 @@ -0,0 +1,18 @@ +from {{ PACKAGE_ROOT }}.{{ MODULE_NAME }} import {{ MODULE_NAME }} + + +__all__ = [ "{{ MODULE_NAME }}" ] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("{{ PACKAGE_ROOT }}") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" + diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py.j2 
b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py.j2 new file mode 100644 index 000000000..638b22c44 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test/{{ PROJECT_ROOT }}/{{ PACKAGE_ROOT }}/{{ PACKAGE_ROOT }}/{{ MODULE_NAME }}.py.j2 @@ -0,0 +1,11 @@ +from typing import Literal +from swarmauri_core.ComponentBase import ComponentBase + + +@ComponentBase.register_type({{ BASE_CLASS }}, "{{MODULE_NAME}}") +class {{ MODULE_NAME }}({% if MIXINS %}{{ MIXINS|join(', ') }},{% endif %}{{ BASE_CLASS }}, ComponentBase): + type: Literal["{{ MODULE_NAME }}"] = "{{ MODULE_NAME }}" + + + # Methods + \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_default.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_default.j2 new file mode 100644 index 000000000..0411edf7d --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_default.j2 @@ -0,0 +1,65 @@ +I need you to generate a file named "{{ RENDERED_FILE_NAME }}". This file provides part of the implementation for the "{{ PKG.NAME }}" package. + +Below are the details and requirements for this task: + +1. **Purpose / Description**: + {{ PURPOSE }} + + {{ DESCRIPTION }} + +2. **Requirements / Constraints**: + {%- for requirement in REQUIREMENTS %} + - {{ requirement }} + {%- endfor %} + + +3. **File and Code Preferences**: + - The code follows PEP 8 style guidelines. + - All functions, classes, and methods include clear docstrings that explain their functionality. + - Include module-level docstrings to explain the purpose of modules. + - Type hints are used throughout the code. + - Critical sections of the code include comments explaining the logic. 
+ - Assume the user will copy-paste this code as-is into their project. + - Provide the concrete fully functional implementation of the code. + - All methods must feature a complete implementation. + + +4. **Behavior**: + - Must inherit the base class. + - Must provide fully functional implementations for all required methods. + +### Desired Output +Produce a single code snippet that contains the entire content for the file {{ RENDERED_FILE_NAME }}. +Do not wrap it in triple backticks! Just provide the raw code ready to copy-paste. +Do not provide an explanation afterward. + +{# At the bottom of the template, include any example files specified by the EXAMPLES array #} +{% if EXAMPLES %} +#### +Leverage the reference file below as a guideline to utilize when creating the new component. + +{%- for example in EXAMPLES %} +#### Reference File: `{{ example }}` +``` +{% include example %} + +``` +{%- endfor %} +{% endif %} + +### Example Format of the Output + +{% include FILE_NAME %} + +{# At the bottom of the template, include any dependency files specified by the DEPENDENCIES array #} +{% if RENDERED_DEPENDENCIES %} +### Dependencies +{%- for dependency in RENDERED_DEPENDENCIES %} +#### `{{ dependency }}` +``` +{% include dependency %} + +``` + +{%- endfor %} +{% endif %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_requirements.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_requirements.j2 new file mode 100644 index 000000000..8bfbfb16c --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_requirements.j2 @@ -0,0 +1,55 @@ +I need you to generate requirements. This file provides specifications for the "{{ NAME }}" project. + +Below are the details and requirements for this task: + +1. **Purpose / Description**: + {{ PURPOSE }} + + {{ DESCRIPTION }} + +2. 
**Requirements / Constraints**: + {%- for requirement in REQUIREMENTS %} + - {{ requirement }} + {%- endfor %} + + +3. **File Preferences**: + - Use markdown formatting + +4. **Behavior**: + - Professional + - Requirements Driven Focus + +### Desired Output +Do not wrap it in triple backticks! Just provide the raw file ready to copy-paste. + +{# At the bottom of the template, include any example files specified by the EXAMPLES array #} +{% if EXAMPLES %} +#### +Leverage the reference file below as a guideline to utilize when creating the new component. + +{%- for example in EXAMPLES %} +#### Reference File: `{{ example }}` +``` +{% include example %} + +``` +{%- endfor %} +{% endif %} + +### Example Format of the Output + +{% include FILE_NAME %} + +{# At the bottom of the template, include any dependency files specified by the DEPENDENCIES array #} +{% if RENDERED_DEPENDENCIES %} +### Dependencies +{%- for dependency in RENDERED_DEPENDENCIES %} +#### `{{ dependency }}` +``` +{% include dependency %} + +``` + +{%- endfor %} +{% endif %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_test.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_test.j2 new file mode 100644 index 000000000..b1e840709 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/agent_test.j2 @@ -0,0 +1 @@ +{{ VALUE }} \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/ptree.yaml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/ptree.yaml.j2 new file mode 100644 index 000000000..81b771376 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/ptree.yaml.j2 @@ -0,0 +1,243 @@ +# ===================================================== +# Global Project Files +# 
===================================================== + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "agent_requirements.j2" + PROJECT_NAME: "Global" + PACKAGE_NAME: "None" + MODULE_NAME: "None" + DEPENDENCIES: [] + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Defines project-wide functional and non-functional requirements." + DESCRIPTION: "This file contains the global requirements for the project." + REQUIREMENTS: + - "Provide functional and non-functional requirements at the project level." + {% if PROJECT_REQUIREMENTS is defined %} + {% for req in PROJECT_REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% endif %} + +# ===================================================== +# Begin Package File Entries +# ===================================================== +{% for PKG in PACKAGES %} + +# ------------------------- +# Package-Level Files for {{ PKG.NAME }} +# ------------------------- + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE" + PROCESS_TYPE: "COPY" + AGENT_PROMPT_TEMPLATE: "agent_default.j2" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: [] + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Defines the license for package {{ PKG.NAME }}." + DESCRIPTION: "This file contains the license for package {{ PKG.NAME }}." 
+ +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2') }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + {# Loop over modules to include their main file as dependencies #} + {% for MOD in PKG.MODULES %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + {% endfor %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + TEMPLATE_SET: "{{ PKG.TEMPLATE_SET_OVERRIDE | default(TEMPLATE_SET) }}" + FILE_CONTEXT: + PURPOSE: "Specifies package metadata and dependencies for {{ PKG.NAME }}." + DESCRIPTION: "Configuration for package {{ PKG.NAME }}. Must reference all module source files, README, and LICENSE." + REQUIREMENTS: + - "Must reference the main module, README, and LICENSE as applicable." + - "Must include any third party dependencies, setting the version to '*'" + - "Must include a comprehensive list of applicable keywords." + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2') }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + {% for MOD in PKG.MODULES %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + {% endfor %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml" + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + TEMPLATE_SET: "{{ PKG.TEMPLATE_SET_OVERRIDE | default(TEMPLATE_SET) }}" + FILE_CONTEXT: + PURPOSE: "Documents package {{ PKG.NAME }}." 
+ DESCRIPTION: "{{ PKG.DESCRIPTION }}" + REQUIREMENTS: + - "Purpose: {{ PKG.PURPOSE }}" + - "Authors: {{ PKG.AUTHORS | join(', ') }}" + - "Include all of the HTML header shown in the example output." + - "Based on the dependencies, create comprehensive installation and usage documentation sections." + - "The installation and usage documentation sections should be significantly more complete than the example format provided." + - "Do not include details regarding cloning or forking the repository." + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "agent_requirements.j2" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md" + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Lists requirements for package {{ PKG.NAME }}." + DESCRIPTION: "Functional and non-functional requirements for package {{ PKG.NAME }}." + REQUIREMENTS: + - "Provide functional requirements for the package." + - "Provide non-functional requirements for the package." 
+ {% if PKG.REQUIREMENTS %} + {% for req in PKG.REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% else %} + {# If no package-specific requirements are provided, fall back to project-wide ones #} + {% for req in PROJECT_REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% endif %} + +# ------------------------- +# Package Source Files +# ------------------------- + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + PROCESS_TYPE: "COPY" + AGENT_PROMPT_TEMPLATE: "agent_default.j2" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: [] + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Initializes the source package for {{ PKG.NAME }}." + DESCRIPTION: "This file makes the directory a Python package." + +{% for MOD in PKG.MODULES %} +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ MOD.AGENT_PROMPT_TEMPLATE_OVERRIDE | default(PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2')) }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "{{ MOD.NAME }}" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + {% if MOD.BASE_FILE %} + - "{{ MOD.BASE_FILE }}" + {% endif %} + {% if MOD.EXTERNAL_DOC_FILES %} + {% for doc in MOD.EXTERNAL_DOC_FILES %} + - "{{ doc }}" + {% endfor %} + {% endif %} + {% if MOD.DEPENDENCIES %} + {% for dep in MOD.DEPENDENCIES %} + {% if "/" in dep %} + - "{{ dep }}" + {% else %} + {% set parts = dep | split('.') %} + {% if parts | length > 2 %} + - "{{ PROJECT_ROOT }}/{{ (parts)[0] }}/{{ (parts)[0] }}/{{ (parts)[1] }}.{{ (parts)[2] }}" + {% else %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ 
PKG.NAME }}/{{ dep }}" + {% endif %} + {% endif %} + {% endfor %} + {% endif %} + {% if MOD.MIXIN_FILES %} + {% for mf in MOD.MIXIN_FILES %} + - "{{ mf }}" + {% endfor %} + {% endif %} + TEMPLATE_SET: "{{ MOD.TEMPLATE_SET_OVERRIDE | default(PKG.TEMPLATE_SET_OVERRIDE | default(TEMPLATE_SET)) }}" + FILE_CONTEXT: + PURPOSE: "{{ MOD.PURPOSE }}" + DESCRIPTION: "{{ MOD.DESCRIPTION }}" + REQUIREMENTS: + - "Implement the functionality for module {{ MOD.NAME }}." + - "Provide implementation logic for inherited methods when applicable." + - "Ensure that all methods include complete, comprehensive, and fully functional implementation logic." + {% if MOD.REQUIREMENTS %} + {% for req in MOD.REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% endif %} + BASE_CLASS: "{{ MOD.BASE_NAME }}" + MIXINS: {{ MOD.MIXINS | default([]) | tojson }} + RESOURCE_KIND: "{{ MOD.RESOURCE_KIND }}" + CONCRETE_EXAMPLES: {{ MOD.CONCRETE_EXAMPLES | default([]) | tojson }} + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ MOD.AGENT_PROMPT_TEMPLATE_OVERRIDE | default(PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2')) }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "{{ MOD.NAME }}" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Unit tests for module {{ MOD.NAME }} in package {{ PKG.NAME }}." + DESCRIPTION: "Tests for module {{ MOD.NAME }}." + REQUIREMENTS: + - "Must use pytest." + - "Must use @pytest.mark.unit decorator to mark test cases." + - "Where applicable use pytest fixtures and parameterize." + - "Should import and test the class from the main module." 
+ +{% endfor %} + +# ------------------------- +# Package-Level Tests +# ------------------------- + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test___init__.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test___init__.py" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2') }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Tests the package initializer for {{ PKG.NAME }}." + DESCRIPTION: "Ensures that the __init__.py in the source folder loads correctly." + REQUIREMENTS: + - "Must use pytest." + - "Must use @pytest.mark.i9n decorator to mark test cases." + - "Where applicable use pytest fixtures." + +{% endfor %} + +# ===================================================== +# End of Payload +# ===================================================== diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/__init__.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md.j2 similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/__init__.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md.j2 diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2 new file mode 100644 index 000000000..41bf4830c --- /dev/null +++ 
b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2 @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/__init__.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md.j2 similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/__init__.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md.j2 diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2 new file mode 100644 index 000000000..cf0819e99 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2 @@ -0,0 +1,28 @@ +![Swamauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - {{ PKG.NAME }} +

+ +--- + +# `{{ PKG.NAME }}` + +## Installation + + + +## Usage + + + diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 new file mode 100644 index 000000000..692ab99b8 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 @@ -0,0 +1,70 @@ +[tool.poetry] +name = "{{ PKG.NAME }}" +version = "0.1.0.dev1" +description = "{{ PKG.DESCRIPTION }}" +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] +keywords = [] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/standards/swarmauri_standard"} + + +# Dependencies + +{% if PACKAGE_REQUIRES %} +{% for required in PACKAGE_REQUIRES %} +{{ required.NAME }} = "{{ required.VERSION }}" +{% endfor %} +{% endif %} + + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + 
+[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] + +markers = [ + "unit: Unit tests", + "i9n: Integration tests", + "xfail: Expected failures", + "xpass: Expected passes" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +{%- set grouped_modules = PKG.MODULES | groupby('RESOURCE_KIND') %} +{%- for kind, mods in grouped_modules %} +[tool.poetry.plugins."swarmauri.{{ kind }}"] +{%- for mod in mods %} +{{ mod.NAME|lower }} = "{{ PKG.NAME }}:{{ mod.NAME }}" +{%- endfor %} +{%- endfor %} + diff --git a/pkgs/standards/swarmauri_standard/tests/static/test_tts.mp3 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n_{{ PKG.NAME }}.py.j2 similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/test_tts.mp3 rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n_{{ PKG.NAME }}.py.j2 diff --git a/pkgs/tooling/monorepo_manager/LICENSE b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test___init__.py.j2 similarity index 100% rename from pkgs/tooling/monorepo_manager/LICENSE rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test___init__.py.j2 diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2 new 
file mode 100644 index 000000000..b259fb07d --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2 @@ -0,0 +1,15 @@ +import pytest +from {{ PKG.NAME }}.{{ MOD.NAME }} import {{ MOD.NAME }} + +@pytest.mark.unit +def test_resource(): + assert {{ MOD.NAME }}.resource == {{ MOD.NAME }} + +@pytest.mark.unit +def test_type(): + assert {{ MOD.NAME }}.type == "{{ MOD.NAME }}" + +@pytest.mark.unit +def test_serialization(): + {{ MOD.NAME|lower() }} = {{ MOD.NAME }}() + assert {{ MOD.NAME|lower() }}.id == {{ MOD.NAME }}.model_validate_json({{ MOD.NAME | lower() }}.model_dump_json()).id \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/xfail/test_xfail_{{ PKG.NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/xfail/test_xfail_{{ PKG.NAME }}.py.j2 new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py.j2 new file mode 100644 index 000000000..37a2441e4 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py.j2 @@ -0,0 +1,23 @@ +{%- for mod in PKG.MODULES %} +from .{{ mod.NAME }} import {{ mod.NAME }} + +{%- endfor %} + +__all__ = [ +{%- for mod in PKG.MODULES %} + "{{ mod.NAME }}"{%- if not loop.last %},{%- endif %} +{%- endfor %} +] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except 
ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("{{ PKG.NAME }}") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2 new file mode 100644 index 000000000..2272bc6d9 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test2/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2 @@ -0,0 +1,10 @@ +from typing import Literal +from swarmauri_core.ComponentBase import ComponentBase + +@ComponentBase.register_type({{ MOD.BASE_NAME }}, "{{ MOD.NAME }}") +class {{ MOD.NAME }}({% if MIXINS %}{{ MIXINS|join(', ') }},{% endif %}{{ MOD.BASE_NAME }}, ComponentBase): + type: Literal["{{ MOD.NAME }}"] = "{{ MOD.NAME }}" + + + # Methods + \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_default.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_default.j2 new file mode 100644 index 000000000..559a77959 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_default.j2 @@ -0,0 +1,66 @@ +I need you to generate a file named "{{ RENDERED_FILE_NAME }}". This file provides part of the implementation for the "{{ PKG.NAME }}" package. + +Below are the details and requirements for this task: + +1. **Purpose**: + {{ FILE_CONTEXT.PURPOSE }} + +2. **Description**: + {{ FILE_CONTEXT.DESCRIPTION }} + +3. 
**Requirements / Constraints**: + {%- for requirement in FILE_CONTEXT.REQUIREMENTS %} + - {{ requirement }} + {%- endfor %} + + +4. **File and Code Preferences**: + - The code follows PEP 8 style guidelines. + - All functions, classes, and methods include clear docstrings that explain their functionality. + - Include module-level docstrings to explain the purpose of modules. + - Type hints are used throughout the code. + - Critical sections of the code include comments explaining the logic. + - Assume the user will copy-paste this code as-is into their project. + - Provide the concrete fully functional implementation of the code. + - All methods must feature a complete implementation. + + +5. **Behavior**: + - Must inherit the base class. + - Must provide fully functional implementations for all required methods. + +### Desired Output +Produce a single code snippet that contains the entire content for the file {{ RENDERED_FILE_NAME }}. +Do not wrap it in triple backticks! Just provide the raw code ready to copy-paste. +Do not provide an explanation afterward. + +{# At the bottom of the template, include any example files specified by the EXAMPLES array #} +{% if FILE_CONTEXT.EXAMPLES %} +#### +Leverage the reference file below as a guideline to utilize when creating the new component. 
+ +{%- for example in FILE_CONTEXT.EXAMPLES %} +#### Reference File: `{{ example }}` +``` +{% include example %} + +``` +{%- endfor %} +{% endif %} + +### Example Format of the Output + +{% include FILE_NAME %} + +{# At the bottom of the template, include any dependency files specified by the DEPENDENCIES array #} +{% if DEPENDENCIES %} +### Dependencies +{%- for dependency in DEPENDENCIES %} +#### `{{ dependency | replace(PROJECT.PROJECT_ROOT + '/', '') }}` +``` +{% include dependency %} + +``` + +{%- endfor %} +{% endif %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_requirements.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_requirements.j2 new file mode 100644 index 000000000..4df4a4d7f --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_requirements.j2 @@ -0,0 +1,56 @@ +I need you to generate requirements. This file provides specifications for the "{{ PROJECT_NAME }}" project. + +Below are the details and requirements for this task: + +1. **Purpose**: + {{ FILE_CONTEXT.PURPOSE }} + +2. **Description**: + {{ FILE_CONTEXT.DESCRIPTION }} + +3. **Requirements / Constraints**: + {%- for requirement in FILE_CONTEXT.REQUIREMENTS %} + - {{ requirement }} + {%- endfor %} + + +4. **File Preferences**: + - Use markdown formatting + +5. **Behavior**: + - Professional + - Requirements Driven Focus + +### Desired Output +Do not wrap it in triple backticks! Just provide the raw file ready to copy-paste. + +{# At the bottom of the template, include any example files specified by the EXAMPLES array #} +{% if FILE_CONTEXT.EXAMPLES %} +#### +Leverage the reference file below as a guideline to utilize when creating the new component. 
+ +{%- for example in FILE_CONTEXT.EXAMPLES %} +#### Reference File: `{{ example }}` +``` +{% include example %} + +``` +{%- endfor %} +{% endif %} + +### Example Format of the Output + +{% include FILE_NAME %} + +{# At the bottom of the template, include any dependency files specified by the DEPENDENCIES array #} +{% if DEPENDENCIES %} +### Dependencies +{%- for dependency in DEPENDENCIES %} +#### `{{ dependency | replace(PROJECT.PROJECT_ROOT + '/', '') }}` +``` +{% include dependency %} + +``` + +{%- endfor %} +{% endif %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_test.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_test.j2 new file mode 100644 index 000000000..5e544eda4 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/agent_test.j2 @@ -0,0 +1 @@ +{{ PROJECT_NAME }} \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/ptree.yaml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/ptree.yaml.j2 new file mode 100644 index 000000000..61b40dea3 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/ptree.yaml.j2 @@ -0,0 +1,245 @@ +# ===================================================== +# Global Project Files +# ===================================================== + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "agent_requirements.j2" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "None" + MODULE_NAME: "None" + DEPENDENCIES: [] + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Defines project-wide functional and non-functional requirements." 
+ DESCRIPTION: "This file contains the global requirements for the project." + REQUIREMENTS: + - "Provide functional and non-functional requirements at the project level." + {% if PROJECT_REQUIREMENTS is defined %} + {% for req in PROJECT_REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% endif %} + +# ===================================================== +# Begin Package File Entries +# ===================================================== +{% for PKG in PACKAGES %} + +# ------------------------- +# Package-Level Files for {{ PKG.NAME }} +# ------------------------- + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE" + PROCESS_TYPE: "COPY" + AGENT_PROMPT_TEMPLATE: "agent_default.j2" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: [] + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Defines the license for package {{ PKG.NAME }}." + DESCRIPTION: "This file contains the license for package {{ PKG.NAME }}." + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2') }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + {# Loop over modules to include their main file as dependencies #} + {% for MOD in PKG.MODULES %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + {% endfor %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + TEMPLATE_SET: "{{ PKG.TEMPLATE_SET_OVERRIDE | default(TEMPLATE_SET) }}" + FILE_CONTEXT: + PURPOSE: "Specifies package metadata and dependencies for {{ PKG.NAME }}." + DESCRIPTION: "Configuration for package {{ PKG.NAME }}. 
Must reference all module source files, README, and LICENSE." + REQUIREMENTS: + - "Must reference the main module, README, and LICENSE as applicable." + - "Must include any third party dependencies, setting the version to '*'" + - "Must include a comprehensive list of applicable keywords." + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2') }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + {% for MOD in PKG.MODULES %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + {% endfor %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml" + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + TEMPLATE_SET: "{{ PKG.TEMPLATE_SET_OVERRIDE | default(TEMPLATE_SET) }}" + FILE_CONTEXT: + PURPOSE: "Documents package {{ PKG.NAME }}." + DESCRIPTION: "{{ PKG.DESCRIPTION }}" + REQUIREMENTS: + - "Purpose: {{ PKG.PURPOSE }}" + - "Authors: {{ PKG.AUTHORS | join(', ') }}" + - "Include all of the HTML header shown in the example output." + - "Based on the dependencies, create comprehensive installation and usage documentation sections." + - "The installation and usage documentation sections should be significantly more complete than the example format provided." + - "Do not include details regarding cloning or forking the repository." 
+ +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "agent_requirements.j2" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md" + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Lists requirements for package {{ PKG.NAME }}." + DESCRIPTION: "Functional and non-functional requirements for package {{ PKG.NAME }}." + REQUIREMENTS: + - "Provide functional requirements for the package." + - "Provide non-functional requirements for the package." + {% if PKG.REQUIREMENTS %} + {% for req in PKG.REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% else %} + {# If no package-specific requirements are provided, fall back to project-wide ones #} + {% for req in PROJECT_REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% endif %} + +# ------------------------- +# Package Source Files +# ------------------------- + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + PROCESS_TYPE: "COPY" + AGENT_PROMPT_TEMPLATE: "agent_default.j2" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: [] + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Initializes the source package for {{ PKG.NAME }}." + DESCRIPTION: "This file makes the directory a Python package." 
+ +{% for MOD in PKG.MODULES %} +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ MOD.AGENT_PROMPT_TEMPLATE_OVERRIDE | default(PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2')) }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "{{ MOD.NAME }}" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + {% if MOD.BASE_FILE %} + - "{{ MOD.BASE_FILE }}" + {% endif %} + {% if MOD.EXTERNAL_DOC_FILES %} + {% for doc in MOD.EXTERNAL_DOC_FILES %} + - "{{ doc }}" + {% endfor %} + {% endif %} + {% if MOD.DEPENDENCIES %} + {% for dep in MOD.DEPENDENCIES %} + {% if "/" in dep %} + - "{{ dep }}" + {% else %} + {% set parts = dep | split('.') %} + {% if parts | length > 2 %} + - "{{ PROJECT_ROOT }}/{{ (parts)[0] }}/{{ (parts)[0] }}/{{ (parts)[1] }}.{{ (parts)[2] }}" + {% else %} + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ dep }}" + {% endif %} + {% endif %} + {% endfor %} + {% endif %} + {% if MOD.MIXIN_FILES %} + {% for mf in MOD.MIXIN_FILES %} + - "{{ mf }}" + {% endfor %} + {% endif %} + TEMPLATE_SET: "{{ MOD.TEMPLATE_SET_OVERRIDE | default(PKG.TEMPLATE_SET_OVERRIDE | default(TEMPLATE_SET)) }}" + FILE_CONTEXT: + PURPOSE: "{{ MOD.PURPOSE }}" + DESCRIPTION: "{{ MOD.DESCRIPTION }}" + REQUIREMENTS: + - "Implement the functionality for module {{ MOD.NAME }}." + - "Provide implementation logic for inherited methods when applicable." + - "Ensure that all methods include complete, comprehensive, and fully functional implementation logic." 
+ {% if MOD.REQUIREMENTS %} + {% for req in MOD.REQUIREMENTS %} + - "{{ req }}" + {% endfor %} + {% endif %} + BASE_CLASS: "{{ MOD.BASE_NAME }}" + MIXINS: {{ MOD.MIXINS | default([]) | tojson }} + RESOURCE_KIND: "{{ MOD.RESOURCE_KIND }}" + EXAMPLES: {{ MOD.EXAMPLES | default([]) | tojson }} + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ MOD.AGENT_PROMPT_TEMPLATE_OVERRIDE | default(PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2')) }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "{{ MOD.NAME }}" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py" + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Unit tests for module {{ MOD.NAME }} in package {{ PKG.NAME }}." + DESCRIPTION: "Tests for module {{ MOD.NAME }}." + REQUIREMENTS: + - "Must use pytest." + - "Must use @pytest.mark.unit decorator to mark test cases." + - "Where applicable use pytest fixtures and parameterize." + - "Should import and test the class from the main module." 
+ +{% endfor %} + +# ------------------------- +# Package-Level Tests +# ------------------------- + +- FILE_NAME: "{% raw %}{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n__init__.py.j2{% endraw %}" + RENDERED_FILE_NAME: "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n__init__.py" + PROCESS_TYPE: "GENERATE" + AGENT_PROMPT_TEMPLATE: "{{ PKG.AGENT_PROMPT_TEMPLATE_OVERRIDE | default('agent_default.j2') }}" + PROJECT_NAME: "{{ PROJECT_NAME }}" + PACKAGE_NAME: "{{ PKG.NAME }}" + MODULE_NAME: "None" + DEPENDENCIES: + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py" + - "{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md" + TEMPLATE_SET: null + FILE_CONTEXT: + PURPOSE: "Tests the package initializer for {{ PKG.NAME }}." + DESCRIPTION: "Ensures that the __init__.py in the source folder loads correctly." + REQUIREMENTS: + - "Must use pytest." + - "Must use @pytest.mark.i9n decorator to mark test cases." + - "Must test the package initialization." + - "Must test the package versioning." + - "Where applicable use pytest fixtures." 
+ +{% endfor %} + +# ===================================================== +# End of Payload +# ===================================================== diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/PROJECT_REQUIREMENTS.md.j2 new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2 new file mode 100644 index 000000000..41bf4830c --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/LICENSE.j2 @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Jacob Stewart] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/PACKAGE_REQUIREMENTS.md.j2 new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2 new file mode 100644 index 000000000..cf0819e99 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/README.md.j2 @@ -0,0 +1,28 @@ +![Swamauri Logo](https://res.cloudinary.com/dbjmpekvl/image/upload/v1730099724/Swarmauri-logo-lockup-2048x757_hww01w.png) + +

+ + PyPI - Downloads + + GitHub Hits + + PyPI - Python Version + + PyPI - License +
+ + PyPI - {{ PKG.NAME }} +

+ +--- + +# `{{ PKG.NAME }}` + +## Installation + + + +## Usage + + + diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 new file mode 100644 index 000000000..c69d45149 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 @@ -0,0 +1,70 @@ +[tool.poetry] +name = "{{ PKG.NAME }}" +version = "0.6.1.dev1" +description = "{{ PKG.DESCRIPTION }}" +authors = ["Jacob Stewart "] +license = "Apache-2.0" +readme = "README.md" +repository = "http://github.com/swarmauri/swarmauri-sdk/{{ PROJECT_ROOT }}/community/{{ PKG.NAME }}/" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" +] +keywords = [] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +# Swarmauri +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev, subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } + + +# Dependencies + +{% if PACKAGE_REQUIRES %} +{% for required in PACKAGE_REQUIRES %} +{{ required.NAME }} = "{{ required.VERSION }}" +{% endfor %} +{% endif %} + + +[tool.poetry.group.dev.dependencies] +flake8 = "^7.0" +pytest = "^8.0" +pytest-asyncio = ">=0.24.0" +pytest-xdist = "^3.6.1" +pytest-json-report = "^1.5.0" +python-dotenv = "*" +requests = "^2.32.3" + +[build-system] +requires = 
["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] + +markers = [ + "unit: Unit tests", + "i9n: Integration tests", + "xfail: Expected failures", + "xpass: Expected passes" +] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +asyncio_default_fixture_loop_scope = "function" + +{%- set grouped_modules = PKG.MODULES | groupby('RESOURCE_KIND') %} +{%- for kind, mods in grouped_modules %} +[tool.poetry.plugins."swarmauri.{{ kind }}"] +{%- for mod in mods %} +{{ mod.NAME|lower }} = "{{ PKG.NAME }}:{{ mod.NAME }}" +{%- endfor %} +{%- endfor %} + diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n__init__.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n__init__.py.j2 new file mode 100644 index 000000000..bb377e899 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n__init__.py.j2 @@ -0,0 +1 @@ +import pytest \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n_{{ PKG.NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n_{{ PKG.NAME }}.py.j2 new file mode 100644 index 000000000..bb377e899 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/i9n/test_i9n_{{ PKG.NAME }}.py.j2 @@ -0,0 +1 @@ +import pytest \ No newline at end of file diff --git 
a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2 new file mode 100644 index 000000000..5e1e7d92e --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/unit/test_{{ MOD.NAME }}.py.j2 @@ -0,0 +1,15 @@ +import pytest +from {{ PKG.NAME }}.{{ MOD.NAME }} import {{ MOD.NAME }} + +@pytest.mark.unit +def test_resource(): + assert {{ MOD.NAME }}.resource == "{{ MOD.RESOURCE_KIND | make_singular | capitalize }}" + +@pytest.mark.unit +def test_type(): + assert {{ MOD.NAME }}.type == "{{ MOD.NAME }}" + +@pytest.mark.unit +def test_serialization(): + {{ MOD.NAME|lower() }} = {{ MOD.NAME }}() + assert {{ MOD.NAME|lower() }}.id == {{ MOD.NAME }}.model_validate_json({{ MOD.NAME | lower() }}.model_dump_json()).id \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/xfail/test_xfail_{{ PKG.NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/tests/xfail/test_xfail_{{ PKG.NAME }}.py.j2 new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/__init__.py.j2 new file mode 100644 index 000000000..37a2441e4 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ 
PKG.NAME }}/__init__.py.j2 @@ -0,0 +1,23 @@ +{%- for mod in PKG.MODULES %} +from .{{ mod.NAME }} import {{ mod.NAME }} + +{%- endfor %} + +__all__ = [ +{%- for mod in PKG.MODULES %} + "{{ mod.NAME }}"{%- if not loop.last %},{%- endif %} +{%- endfor %} +] + +try: + # For Python 3.8 and newer + from importlib.metadata import version, PackageNotFoundError +except ImportError: + # For older Python versions, use the backport + from importlib_metadata import version, PackageNotFoundError + +try: + __version__ = version("{{ PKG.NAME }}") +except PackageNotFoundError: + # If the package is not installed (for example, during development) + __version__ = "0.0.0" diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2 new file mode 100644 index 000000000..2272bc6d9 --- /dev/null +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/templates/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/{{ PKG.NAME }}/{{ MOD.NAME }}.py.j2 @@ -0,0 +1,11 @@ +from typing import Literal +from swarmauri_core.ComponentBase import ComponentBase + + +@ComponentBase.register_type({{ MOD.BASE_NAME }}, "{{ MOD.NAME }}") +class {{ MOD.NAME }}({% if MIXINS %}{{ MIXINS|join(', ') }},{% endif %}{{ MOD.BASE_NAME }}, ComponentBase): + type: Literal["{{ MOD.NAME }}"] = "{{ MOD.NAME }}" + + + # Methods + \ No newline at end of file diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/src/updates.py b/pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/updates.py similarity index 100% rename from pkgs/experimental/swarmauri_experimental/ptree_dag/src/updates.py rename to pkgs/experimental/swarmauri_experimental/ptree_dag/swarmauri_ptree_dag/updates.py diff --git 
a/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/ptree.yaml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/ptree.yaml.j2 index 48ad0cd86..61b40dea3 100644 --- a/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/ptree.yaml.j2 +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/ptree.yaml.j2 @@ -234,6 +234,8 @@ REQUIREMENTS: - "Must use pytest." - "Must use @pytest.mark.i9n decorator to mark test cases." + - "Must test the package initialization." + - "Must test the package versioning." - "Where applicable use pytest fixtures." {% endfor %} diff --git a/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 b/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 index 7b5850688..c69d45149 100644 --- a/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 +++ b/pkgs/experimental/swarmauri_experimental/ptree_dag/templatesv2/test3/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/pyproject.toml.j2 @@ -1,11 +1,11 @@ [tool.poetry] name = "{{ PKG.NAME }}" -version = "0.1.0.dev1" +version = "0.6.1.dev1" description = "{{ PKG.DESCRIPTION }}" authors = ["Jacob Stewart "] license = "Apache-2.0" readme = "README.md" -repository = "http://github.com/swarmauri/swarmauri-sdk/pkgs/{{ PROJECT_ROOT }}/{{ PKG.NAME }}/" +repository = "http://github.com/swarmauri/swarmauri-sdk/{{ PROJECT_ROOT }}/community/{{ PKG.NAME }}/" classifiers = [ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3.10", @@ -19,9 +19,9 @@ keywords = [] python = ">=3.10,<3.13" # Swarmauri -swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/core"} -swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = 
"mono/0.6.0.dev1", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/0.6.0.dev1", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} +swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies diff --git a/pkgs/experimental/swarmauri_experimental/pyproject.toml b/pkgs/experimental/swarmauri_experimental/pyproject.toml index 68a1d0d81..40de0e748 100644 --- a/pkgs/experimental/swarmauri_experimental/pyproject.toml +++ b/pkgs/experimental/swarmauri_experimental/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-experimental" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes experimental components." authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/plugins/example_plugin/pyproject.toml b/pkgs/plugins/example_plugin/pyproject.toml index 14788c6a2..55bc5eaee 100644 --- a/pkgs/plugins/example_plugin/pyproject.toml +++ b/pkgs/plugins/example_plugin/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swm-example-plugin" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes an example of a Swarmauri Plugin." 
authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/poetry.lock b/pkgs/poetry.lock index 527991c41..4511d9604 100644 --- a/pkgs/poetry.lock +++ b/pkgs/poetry.lock @@ -194,18 +194,19 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.12.3" +version = "4.13.3" description = "Screen-scraping library" optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.7.0" groups = ["main"] files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, + {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, + {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, ] [package.dependencies] soupsieve = ">1.2" +typing-extensions = ">=4.0.0" [package.extras] cchardet = ["cchardet"] @@ -1708,14 +1709,14 @@ socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" -version = "0.28.1" +version = "0.29.0" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" groups = ["main"] files = [ - {file = "huggingface_hub-0.28.1-py3-none-any.whl", hash = "sha256:aa6b9a3ffdae939b72c464dbb0d7f99f56e649b55c3d52406f49e0a5a620c0a7"}, - {file = "huggingface_hub-0.28.1.tar.gz", hash = "sha256:893471090c98e3b6efbdfdacafe4052b20b84d59866fb6f54c33d9af18c303ae"}, + {file = "huggingface_hub-0.29.0-py3-none-any.whl", hash = "sha256:c02daa0b6bafbdacb1320fdfd1dc7151d0940825c88c4ef89837fdb1f6ea0afe"}, + {file = "huggingface_hub-0.29.0.tar.gz", hash = "sha256:64034c852be270cac16c5743fe1f659b14515a9de6342d6f42cbb2ede191fc80"}, ] [package.dependencies] @@ -2231,14 +2232,14 @@ files = [ [[package]] name = 
"kubernetes" -version = "32.0.0" +version = "32.0.1" description = "Kubernetes python client" optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "kubernetes-32.0.0-py2.py3-none-any.whl", hash = "sha256:60fd8c29e8e43d9c553ca4811895a687426717deba9c0a66fb2dcc3f5ef96692"}, - {file = "kubernetes-32.0.0.tar.gz", hash = "sha256:319fa840345a482001ac5d6062222daeb66ec4d1bcb3087402aed685adf0aecb"}, + {file = "kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998"}, + {file = "kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28"}, ] [package.dependencies] @@ -3778,14 +3779,14 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p [[package]] name = "posthog" -version = "3.13.0" +version = "3.14.1" description = "Integrate PostHog into any python application." optional = false python-versions = "*" groups = ["main"] files = [ - {file = "posthog-3.13.0-py2.py3-none-any.whl", hash = "sha256:0afd0132055a3da9c6b0ecf763e7f2ce2b66659ef16169883394d0835c30d501"}, - {file = "posthog-3.13.0.tar.gz", hash = "sha256:54e9de232459846b1686a0cfb58acb02b7ccda379d837e1eb1c3af62c3775915"}, + {file = "posthog-3.14.1-py2.py3-none-any.whl", hash = "sha256:ee305413e9bc8a81f7092e5b38373afc14b00f9550021ed74cdc9d6074bfce28"}, + {file = "posthog-3.14.1.tar.gz", hash = "sha256:c8ea74ac6b3dff726e08d9e0f11f4726be46f0b4d335cc51ea25568fc8d23d99"}, ] [package.dependencies] @@ -5461,7 +5462,7 @@ full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart [[package]] name = "swarmauri" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Namespace package for components, packages, and plugins within the Swarmauri framework." 
optional = false python-versions = ">=3.10,<3.13" @@ -5474,7 +5475,7 @@ httpx = "^0.27.0" pydantic = "^2.9.2" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} toml = "^0.10.2" typing_extensions = "*" @@ -5494,12 +5495,12 @@ vectorstore-tfidf = ["swarmauri_vectorstore_tfidf @ git+https://github.com/swarm type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/swarmauri" [[package]] name = "swarmauri-base" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "This repository includes base classes and mixins for the Swarmauri framework." optional = false python-versions = ">=3.10,<3.13" @@ -5515,12 +5516,12 @@ swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/base" [[package]] name = "swarmauri-core" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "This repository includes core interfaces for the Swarmauri framework." 
optional = false python-versions = ">=3.10,<3.13" @@ -5536,12 +5537,12 @@ pyyaml = "^6.0.2" type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/core" [[package]] name = "swarmauri-distance-minkowski" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Minkowski Distance for Swarmauri." optional = false python-versions = ">=3.10,<3.13" @@ -5553,18 +5554,18 @@ develop = false scipy = ">=1.7.0,<1.14.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_distance_minkowski" [[package]] name = "swarmauri-documentstore-redis" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Psutil Tool" optional = false python-versions = ">=3.10,<3.13" @@ -5576,18 +5577,18 @@ develop = false redis = "^4.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = 
"https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_documentstore_redis" [[package]] name = "swarmauri-embedding-doc2vec" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "A Doc2Vec based Embedding Model." optional = false python-versions = ">=3.10,<3.13" @@ -5599,18 +5600,18 @@ develop = false gensim = "==4.3.3" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec" [[package]] name = "swarmauri-embedding-mlm" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "example community package" optional = false python-versions = ">=3.10,<3.13" @@ -5621,18 +5622,18 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = 
"mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_embedding_mlm" [[package]] name = "swarmauri-embedding-nmf" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "NMF Embedding for Swarmauri." optional = false python-versions = ">=3.10,<3.13" @@ -5644,18 +5645,18 @@ develop = false scikit-learn = "^1.4.2" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_embedding_nmf" [[package]] name = "swarmauri-embedding-tfidf" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Tfidf Embedding for Swarmauri" optional = false 
python-versions = ">=3.10,<3.13" @@ -5667,18 +5668,18 @@ develop = false scikit-learn = "^1.4.2" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_embedding_tfidf" [[package]] name = "swarmauri-llm-leptonai" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Lepton AI Model" optional = false python-versions = ">=3.10,<3.13" @@ -5690,18 +5691,18 @@ develop = false openai = "^1.62.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = 
"pkgs/community/swarmauri_llm_leptonai" [[package]] name = "swarmauri-measurement-mutualinformation" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Mutual Information Measurement Community Package." optional = false python-versions = ">=3.10,<3.13" @@ -5719,12 +5720,12 @@ swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_measurement_mutualinformation" [[package]] name = "swarmauri-measurement-tokencountestimator" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "This repository includes an example of a First Class Swarmauri Example." optional = false python-versions = ">=3.10,<3.13" @@ -5741,12 +5742,12 @@ tiktoken = "^0.8.0" type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_measurement_tokencountestimator" [[package]] name = "swarmauri-ocr-pytesseract" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Tesseract Image to Text Model" optional = false python-versions = ">=3.10,<3.13" @@ -5763,12 +5764,12 @@ swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_ocr_pytesseract" [[package]] name = "swarmauri-parser-beautifulsoupelement" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "BeautifulSoup Element 
Parser for Swarmauri." optional = false python-versions = ">=3.10,<3.13" @@ -5777,21 +5778,21 @@ files = [] develop = false [package.dependencies] -beautifulsoup4 = "04.12.3" +beautifulsoup4 = "^4.13.3" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_parser_beautifulsoupelement" [[package]] name = "swarmauri-parser-bertembedding" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Bert Embedding Parser" optional = false python-versions = ">=3.10,<3.13" @@ -5802,19 +5803,19 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} transformers = ">=4.45.0" [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" 
-resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_parser_bertembedding" [[package]] name = "swarmauri-parser-entityrecognition" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Entity Recognition Parser for Swarmauri." optional = false python-versions = ">=3.10,<3.13" @@ -5826,18 +5827,18 @@ develop = false spacy = ">=3.0.0,<=3.8.2" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_parser_entityrecognition" [[package]] name = "swarmauri-parser-fitzpdf" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Fitz PDF Parser for Swarmauri." 
optional = false python-versions = ">=3.10,<3.13" @@ -5849,18 +5850,18 @@ develop = false PyMuPDF = "^1.24.12" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_parser_fitzpdf" [[package]] name = "swarmauri-parser-keywordextractor" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Keyword Extractor Parser for Swarmauri." 
optional = false python-versions = ">=3.10,<3.13" @@ -5871,19 +5872,19 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} yake = "==0.4.8" [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_parser_keywordextractor" [[package]] name = "swarmauri-parser-pypdf2" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "PyPDF2 Parser for Swarmauri." 
optional = false python-versions = ">=3.10,<3.13" @@ -5895,18 +5896,18 @@ develop = false PyPDF2 = "^3.0.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_parser_pypdf2" [[package]] name = "swarmauri-parser-textblob" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "TextBlob Parser for Swarmauri." 
optional = false python-versions = ">=3.10,<3.13" @@ -5918,19 +5919,19 @@ develop = false nltk = "^3.9.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} textblob = "^0.18.0" [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_parser_textblob" [[package]] name = "swarmauri-standard" -version = "0.6.1.dev8" +version = "0.6.1.dev3" description = "This repository includes standard components within the Swarmauri framework." 
optional = false python-versions = ">=3.10,<3.13" @@ -5956,12 +5957,12 @@ typing_extensions = "*" type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" -subdirectory = "pkgs/standards/swarmauri_standard" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" +subdirectory = "pkgs/swarmauri_standard" [[package]] name = "swarmauri-state-clipboard" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Community Clipboard State" optional = false python-versions = ">=3.10,<3.13" @@ -5978,12 +5979,12 @@ swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_state_clipboard" [[package]] name = "swarmauri-tool-captchagenerator" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Community Captcha Generator Tool" optional = false python-versions = ">=3.10,<3.13" @@ -5995,18 +5996,18 @@ develop = false captcha = "^0.6.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = 
"26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_captchagenerator" [[package]] name = "swarmauri-tool-dalechallreadability" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Community Dale-Chall Readability Tool" optional = false python-versions = ">=3.10,<3.13" @@ -6017,19 +6018,19 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} textstat = "^0.7.4" [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_dalechallreadability" [[package]] name = "swarmauri-tool-downloadpdf" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Community Download PDF Tool" optional = false python-versions = ">=3.10,<3.13" @@ -6040,18 +6041,18 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", 
branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_downloadpdf" [[package]] name = "swarmauri-tool-entityrecognition" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Community Entity Recognition Tool" optional = false python-versions = ">=3.10,<3.13" @@ -6062,19 +6063,19 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} transformers = ">=4.45.0" [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_entityrecognition" [[package]] name = "swarmauri-tool-folium" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "This repository includes an example of a First Class Swarmauri Example." 
optional = false python-versions = ">=3.10,<3.13" @@ -6086,18 +6087,18 @@ develop = false folium = "^0.18.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_folium" [[package]] name = "swarmauri-tool-gmail" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "example community package" optional = false python-versions = ">=3.10,<3.13" @@ -6109,18 +6110,18 @@ develop = false google-api-python-client = "^2.157.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = 
"pkgs/community/swarmauri_tool_gmail" [[package]] name = "swarmauri-tool-lexicaldensity" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Lexical Density Tool for Swarmauri." optional = false python-versions = ">=3.10,<3.13" @@ -6132,19 +6133,19 @@ develop = false nltk = "^3.9.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} textstat = "^0.7.4" [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_lexicaldensity" [[package]] name = "swarmauri-tool-matplotlib" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Matplotlib tool for Swarmauri." 
optional = false python-versions = ">=3.10,<3.13" @@ -6156,18 +6157,18 @@ develop = false matplotlib = ">=3.9.2" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_tool_matplotlib" [[package]] name = "swarmauri-tool-psutil" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Psutil Tool." 
optional = false python-versions = ">=3.10,<3.13" @@ -6179,18 +6180,18 @@ develop = false psutil = "^6.1.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_psutil" [[package]] name = "swarmauri-tool-qrcodegenerator" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri QR Code Generator Tool." 
optional = false python-versions = ">=3.10,<3.13" @@ -6202,18 +6203,18 @@ develop = false qrcode = "^7.3.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_qrcodegenerator" [[package]] name = "swarmauri-tool-sentencecomplexity" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "This repository includes an example of a First Class Swarmauri Example." 
optional = false python-versions = ">=3.10,<3.13" @@ -6225,18 +6226,18 @@ develop = false nltk = "^3.9.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_sentencecomplexity" [[package]] name = "swarmauri-tool-sentimentanalysis" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Sentiment Analysis Tool" optional = false python-versions = ">=3.10,<3.13" @@ -6247,18 +6248,18 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = 
"pkgs/community/swarmauri_tool_sentimentanalysis" [[package]] name = "swarmauri-tool-smogindex" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Smog Index Tool." optional = false python-versions = ">=3.10,<3.13" @@ -6270,18 +6271,18 @@ develop = false nltk = "^3.9.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_smogindex" [[package]] name = "swarmauri-tool-textlength" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Text Length Tool for Swarmauri" optional = false python-versions = ">=3.10,<3.13" @@ -6293,18 +6294,18 @@ develop = false nltk = "^3.9.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = 
"https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_textlength" [[package]] name = "swarmauri-tool-webscraping" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Web Scraping Tool for Swarmauri" optional = false python-versions = ">=3.10,<3.13" @@ -6316,18 +6317,18 @@ develop = false beautifulsoup4 = "^4.10.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_webscraping" [[package]] name = "swarmauri-tool-zapierhook" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Zapier Hook Tool" optional = false python-versions = ">=3.10,<3.13" @@ -6338,18 +6339,18 @@ develop = false [package.dependencies] swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} 
+swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_tool_zapierhook" [[package]] name = "swarmauri-toolkit-github" -version = "0.6.1.dev8" +version = "0.6.1.dev18" description = "Github Toolkit" optional = false python-versions = ">=3.10,<3.13" @@ -6361,18 +6362,18 @@ develop = false pygithub = "^2.4.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_toolkit_github" [[package]] name = "swarmauri-vectorstore-annoy" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Annoy Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6385,18 +6386,18 @@ annoy = "^1.17.3" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} 
swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_annoy" [[package]] name = "swarmauri-vectorstore-cloudweaviate" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Weaviate Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6408,19 +6409,19 @@ develop = false swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} weaviate-client = "^4.9.2" [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = 
"pkgs/community/swarmauri_vectorstore_cloudweaviate" [[package]] name = "swarmauri-vectorstore-doc2vec" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "A Doc2Vec based Vector Store and Doc2Vec Based Embedding Model." optional = false python-versions = ">=3.10,<3.13" @@ -6432,18 +6433,18 @@ develop = false swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_vectorstore_doc2vec" [[package]] name = "swarmauri-vectorstore-duckdb" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "A DuckDB based Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6456,18 +6457,18 @@ duckdb = "^1.1.1" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} 
-swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_duckdb" [[package]] name = "swarmauri-vectorstore-mlm" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri MLM Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6479,7 +6480,7 @@ develop = false swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} torch = "^2.4.1" transformers = ">=4.45.0" @@ -6487,12 +6488,12 @@ transformers = ">=4.45.0" type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_mlm" [[package]] name = "swarmauri-vectorstore-neo4j" -version = "0.6.1.dev8" +version = "0.6.1.dev16" 
description = "Swarmauri Neo4j Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6504,18 +6505,18 @@ develop = false neo4j = "^5.25.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_neo4j" [[package]] name = "swarmauri-vectorstore-persistentchromadb" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "A Persistent ChromaDB based Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6528,18 +6529,18 @@ chromadb = "^0.5.17" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = 
"https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_persistentchromadb" [[package]] name = "swarmauri-vectorstore-pinecone" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Pinecone Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6552,18 +6553,18 @@ pinecone-client = {version = "^5.0.1", extras = ["grpc"]} swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_pinecone" [[package]] name = "swarmauri-vectorstore-qdrant" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Persistent Qdrant Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6576,18 +6577,18 @@ qdrant-client = "^1.12.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = 
"https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_doc2vec = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_qdrant" [[package]] name = "swarmauri-vectorstore-redis" -version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "Swarmauri Redis Vector Store" optional = false python-versions = ">=3.10,<3.13" @@ -6599,18 +6600,18 @@ develop = false redis = "^4.0" swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/community/swarmauri_vectorstore_redis" [[package]] name = "swarmauri-vectorstore-tfidf" 
-version = "0.6.1.dev8" +version = "0.6.1.dev16" description = "A Tfidf based Vector Store and Tfidf Based Embedding Model." optional = false python-versions = ">=3.10,<3.13" @@ -6622,13 +6623,13 @@ develop = false swarmauri_base = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_embedding_tfidf = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_tfidf"} -swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard"} [package.source] type = "git" url = "https://github.com/swarmauri/swarmauri-sdk.git" reference = "mono/dev" -resolved_reference = "fb3581161f329af1d615896b7497809c8a86fea0" +resolved_reference = "26f60b55c77dfe71e52c1de6856bd055cdae31f1" subdirectory = "pkgs/standards/swarmauri_vectorstore_tfidf" [[package]] @@ -7685,4 +7686,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "6e3a7520e8ec68927caf5031d1f5891001d479cf5095f649b1db7c9ba137951b" +content-hash = "224da02aaf8a661ba8779a4d0664010322b6566892c5673050c4c448ee7289a4" diff --git a/pkgs/pyproject.toml b/pkgs/pyproject.toml index 042e31d83..fd0a92f54 100644 --- a/pkgs/pyproject.toml +++ b/pkgs/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-monorepo" -version = "0.6.1.dev9" +version = "0.6.1" description = "Monorepo for multiple interdependent Swarmauri Python packages" authors = ["Jacob Stewart "] # Disables packaging mode @@ -17,7 +17,7 @@ ipython = "^8.28.0" ### swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", 
branch = "mono/dev", subdirectory = "pkgs/core" } swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base" } -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri" } ### @@ -27,7 +27,7 @@ swarmauri_vectorstore_doc2vec = { git = "https://github.com/swarmauri/swarmauri- swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} swarmauri_embedding_tfidf = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_tfidf"} swarmauri_embedding_nmf = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_nmf"} -swarmauri-tool-matplotlib = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_tool_matplotlib"} +swarmauri_tool_matplotlib = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_tool_matplotlib"} swarmauri_parser_keywordextractor = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_parser_keywordextractor"} swarmauri_parser_beautifulsoupelement = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_parser_beautifulsoupelement"} swarmauri_vectorstore_tfidf = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch 
= "mono/dev", subdirectory = "pkgs/standards/swarmauri_vectorstore_tfidf"} diff --git a/pkgs/standards/swarmauri_distance_minkowski/pyproject.toml b/pkgs/standards/swarmauri_distance_minkowski/pyproject.toml index e8c9a58a4..a6d5949b0 100644 --- a/pkgs/standards/swarmauri_distance_minkowski/pyproject.toml +++ b/pkgs/standards/swarmauri_distance_minkowski/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_distance_minkowski" -version = "0.6.1.dev9" +version = "0.6.1" description = "Minkowski Distance for Swarmauri." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies scipy = ">=1.7.0,<1.14.0" diff --git a/pkgs/standards/swarmauri_embedding_doc2vec/pyproject.toml b/pkgs/standards/swarmauri_embedding_doc2vec/pyproject.toml index 1fc34b9b1..ae10232f8 100644 --- a/pkgs/standards/swarmauri_embedding_doc2vec/pyproject.toml +++ b/pkgs/standards/swarmauri_embedding_doc2vec/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_embedding_doc2vec" -version = "0.6.1.dev9" +version = "0.6.1" description = "A Doc2Vec based Embedding Model." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies gensim = "==4.3.3" diff --git a/pkgs/standards/swarmauri_embedding_nmf/pyproject.toml b/pkgs/standards/swarmauri_embedding_nmf/pyproject.toml index c449b3782..3a93c3ebd 100644 --- a/pkgs/standards/swarmauri_embedding_nmf/pyproject.toml +++ b/pkgs/standards/swarmauri_embedding_nmf/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_embedding_nmf" -version = "0.6.1.dev9" +version = "0.6.1" description = "NMF Embedding for Swarmauri." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies scikit-learn = "^1.4.2" diff --git a/pkgs/standards/swarmauri_embedding_tfidf/pyproject.toml b/pkgs/standards/swarmauri_embedding_tfidf/pyproject.toml index 67a1dac1c..8fdda9660 100644 --- a/pkgs/standards/swarmauri_embedding_tfidf/pyproject.toml +++ b/pkgs/standards/swarmauri_embedding_tfidf/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_embedding_tfidf" -version = "0.6.1.dev9" +version = "0.6.1" description = "Tfidf Embedding for Swarmauri" authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies scikit-learn = "^1.4.2" diff --git a/pkgs/standards/swarmauri_parser_beautifulsoupelement/pyproject.toml b/pkgs/standards/swarmauri_parser_beautifulsoupelement/pyproject.toml index 9ddbfc6e5..134d8bec3 
100644 --- a/pkgs/standards/swarmauri_parser_beautifulsoupelement/pyproject.toml +++ b/pkgs/standards/swarmauri_parser_beautifulsoupelement/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_beautifulsoupelement" -version = "0.6.1.dev9" +version = "0.6.1" description = "BeautifulSoup Element Parser for Swarmauri." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,10 +19,10 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies -beautifulsoup4 = "04.12.3" +beautifulsoup4 = "^4.13.3" [tool.poetry.group.dev.dependencies] flake8 = "^7.0" diff --git a/pkgs/standards/swarmauri_parser_keywordextractor/pyproject.toml b/pkgs/standards/swarmauri_parser_keywordextractor/pyproject.toml index 529f295dc..ba78d4aba 100644 --- a/pkgs/standards/swarmauri_parser_keywordextractor/pyproject.toml +++ b/pkgs/standards/swarmauri_parser_keywordextractor/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_parser_keywordextractor" -version = "0.6.1.dev9" +version = "0.6.1" description = "Keyword Extractor Parser for Swarmauri." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/MinkowskiDistance.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/MinkowskiDistance.py deleted file mode 100644 index c7e2de3aa..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/MinkowskiDistance.py +++ /dev/null @@ -1,68 +0,0 @@ -from typing import List, Literal -from scipy.spatial.distance import minkowski -from swarmauri_standard.vectors.Vector import Vector -from swarmauri_base.distances.DistanceBase import DistanceBase -from swarmauri_core.ComponentBase import ComponentBase - - -@ComponentBase.register_type(DistanceBase, "MinkowskiDistance") -class MinkowskiDistance(DistanceBase): - """ - Implementation of the IDistanceSimiliarity interface using the Minkowski distance metric. - Minkowski distance is a generalized metric form that includes Euclidean distance, - Manhattan distance, and others depending on the order (p) parameter. - - The class provides methods to compute the Minkowski distance between two vectors. - - Parameters: - - p (int): The order of the Minkowski distance. p=2 corresponds to the Euclidean distance, - while p=1 corresponds to the Manhattan distance. 
Default is - """ - - type: Literal["MinkowskiDistance"] = "MinkowskiDistance" - p: int = 2 - - def distance(self, vector_a: Vector, vector_b: Vector) -> float: - """ - Computes the Minkowski distance between two vectors. - - Args: - vector_a (Vector): The first vector in the comparison. - vector_b (Vector): The second vector in the comparison. - - Returns: - float: The computed Minkowski distance between vector_a and vector_b. - """ - # Check if both vectors have the same dimensionality - if vector_a.shape != vector_b.shape: - raise ValueError("Vectors must have the same dimensionality.") - - # Extract data from Vector instances - data_a = vector_a.value - data_b = vector_b.value - - # Calculate and return the Minkowski distance - return minkowski(data_a, data_b, p=self.p) - - def similarity(self, vector_a: Vector, vector_b: Vector) -> float: - """ - Compute the similarity between two vectors based on the Minkowski distance. - The similarity is inversely related to the distance. - - Args: - vector_a (Vector): The first vector to compare for similarity. - vector_b (Vector): The second vector to compare with the first vector. - - Returns: - float: A similarity score between vector_a and vector_b. 
- """ - dist = self.distance(vector_a, vector_b) - return 1 / (1 + dist) # An example similarity score - - def distances(self, vector_a: Vector, vectors_b: List[Vector]) -> List[float]: - distances = [self.distance(vector_a, vector_b) for vector_b in vectors_b] - return distances - - def similarities(self, vector_a: Vector, vectors_b: List[Vector]) -> List[float]: - similarities = [self.similarity(vector_a, vector_b) for vector_b in vectors_b] - return similarities diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/NmfEmbedding.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/NmfEmbedding.py deleted file mode 100644 index 30a1dd421..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/NmfEmbedding.py +++ /dev/null @@ -1,111 +0,0 @@ -from typing import Any, List, Literal - -import joblib -from pydantic import PrivateAttr -from sklearn.decomposition import NMF -from sklearn.feature_extraction.text import TfidfVectorizer -from swarmauri_base.embeddings.EmbeddingBase import EmbeddingBase -from swarmauri_core.ComponentBase import ComponentBase - -from swarmauri_standard.vectors.Vector import Vector - - -@ComponentBase.register_type(EmbeddingBase, "NmfEmbedding") -class NmfEmbedding(EmbeddingBase): - n_components: int = 10 - _tfidf_vectorizer = PrivateAttr() - _model = PrivateAttr() - feature_names: List[Any] = [] - - type: Literal["NmfEmbedding"] = "NmfEmbedding" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - # Initialize TF-IDF Vectorizer - self._tfidf_vectorizer = TfidfVectorizer() - # Initialize NMF with the desired number of components - self._model = NMF(n_components=self.n_components) - - def fit(self, data): - """ - Fit the NMF model to data. - - Args: - data (Union[str, Any]): The text data to fit. 
- """ - # Transform data into TF-IDF matrix - tfidf_matrix = self._tfidf_vectorizer.fit_transform(data) - # Fit the NMF model - self._model.fit(tfidf_matrix) - # Store feature names - self.feature_names = self._tfidf_vectorizer.get_feature_names_out() - - def transform(self, data): - """ - Transform new data into NMF feature space. - - Args: - data (Union[str, Any]): Text data to transform. - - Returns: - List[IVector]: A list of vectors representing the transformed data. - """ - # Transform data into TF-IDF matrix - tfidf_matrix = self._tfidf_vectorizer.transform(data) - # Transform TF-IDF matrix into NMF space - nmf_features = self._model.transform(tfidf_matrix) - - # Wrap NMF features in SimpleVector instances and return - return [Vector(value=features.tolist()) for features in nmf_features] - - def fit_transform(self, data): - """ - Fit the model to data and then transform it. - - Args: - data (Union[str, Any]): The text data to fit and transform. - - Returns: - List[IVector]: A list of vectors representing the fitted and transformed data. - """ - self.fit(data) - return self.transform(data) - - def infer_vector(self, data): - """ - Convenience method for transforming a single data point. - - Args: - data (Union[str, Any]): Single text data to transform. - - Returns: - IVector: A vector representing the transformed single data point. - """ - return self.transform([data])[0] - - def extract_features(self): - """ - Extract the feature names from the TF-IDF vectorizer. - - Returns: - The feature names. - """ - return self.feature_names.tolist() - - def save_model(self, path: str) -> None: - """ - Saves the NMF model and TF-IDF vectorizer using joblib. 
- """ - # It might be necessary to save both tfidf_vectorizer and model - # Consider using a directory for 'path' or appended identifiers for each model file - joblib.dump(self._tfidf_vectorizer, f"{path}_tfidf.joblib") - joblib.dump(self._model, f"{path}_nmf.joblib") - - def load_model(self, path: str) -> None: - """ - Loads the NMF model and TF-IDF vectorizer from paths using joblib. - """ - self._tfidf_vectorizer = joblib.load(f"{path}_tfidf.joblib") - self._model = joblib.load(f"{path}_nmf.joblib") - # Dependending on your implementation, you might need to refresh the feature_names - self.feature_names = self._tfidf_vectorizer.get_feature_names_out() diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/TfidfEmbedding.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/TfidfEmbedding.py deleted file mode 100644 index 8274e6cda..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/TfidfEmbedding.py +++ /dev/null @@ -1,55 +0,0 @@ -from typing import Any, List, Literal, Union - -import joblib -from pydantic import PrivateAttr -from sklearn.feature_extraction.text import TfidfVectorizer as SklearnTfidfVectorizer -from swarmauri_base.embeddings.EmbeddingBase import EmbeddingBase -from swarmauri_core.ComponentBase import ComponentBase - -from swarmauri_standard.vectors.Vector import Vector - - -@ComponentBase.register_type(EmbeddingBase, "TfidfEmbedding") -class TfidfEmbedding(EmbeddingBase): - _model = PrivateAttr() - _fit_matrix = PrivateAttr() - type: Literal["TfidfEmbedding"] = "TfidfEmbedding" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self._model = SklearnTfidfVectorizer() - - def extract_features(self): - return self._model.get_feature_names_out().tolist() - - def fit(self, documents: List[str]) -> None: - self._fit_matrix = self._model.fit_transform(documents) - - def fit_transform(self, documents: List[str]) -> List[Vector]: - self._fit_matrix = 
self._model.fit_transform(documents) - # Convert the sparse matrix rows into Vector instances - vectors = [ - Vector(value=vector.toarray().flatten()) for vector in self._fit_matrix - ] - return vectors - - def transform(self, data: Union[str, Any], documents: List[str]) -> List[Vector]: - raise NotImplementedError("Transform not implemented on TFIDFVectorizer.") - - def infer_vector(self, data: str, documents: List[str]) -> Vector: - documents.append(data) - tmp_tfidf_matrix = self.fit_transform(documents) - query_vector = tmp_tfidf_matrix[-1] - return query_vector - - def save_model(self, path: str) -> None: - """ - Saves the TF-IDF model to the specified path using joblib. - """ - joblib.dump(self._model, path) - - def load_model(self, path: str) -> None: - """ - Loads a TF-IDF model from the specified path using joblib. - """ - self._model = joblib.load(path) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/BeautifulSoupElementParser.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/BeautifulSoupElementParser.py deleted file mode 100644 index 828a240d1..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/BeautifulSoupElementParser.py +++ /dev/null @@ -1,47 +0,0 @@ -from bs4 import BeautifulSoup -from typing import List, Union, Any, Literal -from swarmauri_standard.documents.Document import Document -from swarmauri_base.parsers.ParserBase import ParserBase -from swarmauri_core.ComponentBase import ComponentBase - - -@ComponentBase.register_type(ParserBase, "BeautifulSoupElementParser") -class BeautifulSoupElementParser(ParserBase): - """ - A concrete parser that leverages BeautifulSoup to extract specific HTML elements and their content. - """ - - element: str - type: Literal["BeautifulSoupElementParser"] = "BeautifulSoupElementParser" - - def parse(self, data: Union[str, Any]) -> List[Document]: - """ - Parses the input data to extract specific HTML elements. 
- - Args: - data (Union[str, Any]): The HTML content to be parsed. - - Returns: - List[IDocument]: A list of documents containing the extracted elements. - """ - # Ensure that input is a string - if not isinstance(data, str): - raise ValueError( - "BeautifulSoupElementParser expects input data to be of type str." - ) - - # Initialize BeautifulSoup parser - soup = BeautifulSoup(data, "html.parser") - - # Find all specified elements - elements = soup.find_all(self.element) - - # Create a document for each element - documents = [ - Document( - content=str(element), metadata={"element": self.element, "index": index} - ) - for index, element in enumerate(elements) - ] - - return documents diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/KeywordExtractorParser.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/KeywordExtractorParser.py deleted file mode 100644 index c4dba36d7..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/KeywordExtractorParser.py +++ /dev/null @@ -1,55 +0,0 @@ -import yake -from typing import List, Union, Any, Literal -from pydantic import ConfigDict, PrivateAttr -from swarmauri_standard.documents.Document import Document -from swarmauri_base.parsers.ParserBase import ParserBase -from swarmauri_core.ComponentBase import ComponentBase - - -@ComponentBase.register_type(ParserBase, "KeywordExtractorParser") -class KeywordExtractorParser(ParserBase): - """ - Extracts keywords from text using the YAKE keyword extraction library. 
- """ - - lang: str = "en" - num_keywords: int = 10 - _kw_extractor: yake.KeywordExtractor = PrivateAttr(default=None) - model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True) - type: Literal["KeywordExtractorParser"] = "KeywordExtractorParser" - - def __init__(self, **data): - super().__init__(**data) - self._kw_extractor = yake.KeywordExtractor( - lan=self.lang, - n=3, - dedupLim=0.9, - dedupFunc="seqm", - windowsSize=1, - top=self.num_keywords, - features=None, - ) - - def parse(self, data: Union[str, Any]) -> List[Document]: - """ - Extract keywords from input text and return as list of Document instances containing keyword information. - - Parameters: - - data (Union[str, Any]): The input text from which to extract keywords. - - Returns: - - List[Document]: A list of Document instances, each containing information about an extracted keyword. - """ - # Ensure data is in string format for analysis - text = str(data) if not isinstance(data, str) else data - - # Extract keywords using YAKE - keywords = self._kw_extractor.extract_keywords(text) - - # Create Document instances for each keyword - documents = [ - Document(content=keyword, metadata={"score": score}) - for index, (keyword, score) in enumerate(keywords) - ] - - return documents diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/MatplotlibCsvTool.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/MatplotlibCsvTool.py deleted file mode 100644 index 6c9c37ab5..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/MatplotlibCsvTool.py +++ /dev/null @@ -1,78 +0,0 @@ -import pandas as pd -import matplotlib.pyplot as plt -import base64 -from typing import List, Literal, Dict -from pydantic import Field -from swarmauri_standard.tools.Parameter import Parameter -from swarmauri_base.tools.ToolBase import ToolBase -from swarmauri_core.ComponentBase import ComponentBase - - -@ComponentBase.register_type(ToolBase, "MatplotlibCsvTool") -class 
MatplotlibCsvTool(ToolBase): - type: Literal["MatplotlibCsvTool"] = "MatplotlibCsvTool" - name: str = Field( - "MatplotlibCsvTool", - description="Tool to generate plots from CSV data using Matplotlib.", - ) - description: str = Field( - "This tool reads data from a CSV file and generates a plot using Matplotlib.", - description="Description of the MatplotlibCsvTool", - ) - - parameters: List[Parameter] = Field( - default_factory=lambda: [ - Parameter( - name="csv_file", - type="string", - description="The path to the CSV file containing the data.", - required=True, - ), - Parameter( - name="x_column", - type="string", - description="The name of the column to use for the x-axis.", - required=True, - ), - Parameter( - name="y_column", - type="string", - description="The name of the column to use for the y-axis.", - required=True, - ), - Parameter( - name="output_file", - type="string", - description="The filename where the plot will be saved.", - required=True, - ), - ] - ) - - def __call__( - self, csv_file: str, x_column: str, y_column: str, output_file: str - ) -> Dict[str, str]: - # Read data from CSV - data = pd.read_csv(csv_file) - - # Check if columns exist in the DataFrame - if x_column not in data.columns or y_column not in data.columns: - raise ValueError( - f"Columns {x_column} and/or {y_column} not found in the CSV file." 
- ) - - # Generate plot - plt.figure(figsize=(10, 6)) - plt.plot(data[x_column], data[y_column], marker="o") - plt.xlabel(x_column) - plt.ylabel(y_column) - plt.title(f"{y_column} vs {x_column}") - plt.grid(True) - plt.savefig(output_file) - plt.close() - print(f"Plot generated and saved to {output_file}") - # Encode the plot image as base64 - with open(output_file, "rb") as image_file: - encoded_image = base64.b64encode(image_file.read()).decode("utf-8") - - return {"img_path": output_file, "img_base64": encoded_image, "data": []} diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/MatplotlibTool.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/MatplotlibTool.py deleted file mode 100644 index ff77caf24..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/MatplotlibTool.py +++ /dev/null @@ -1,120 +0,0 @@ -import base64 -import matplotlib.pyplot as plt -from typing import List, Literal -from pydantic import Field -from swarmauri_base.tools.ToolBase import ToolBase -from swarmauri_standard.tools.Parameter import Parameter -from swarmauri_core.ComponentBase import ComponentBase - - -@ComponentBase.register_type(ToolBase, "MatplotlibTool") -class MatplotlibTool(ToolBase): - version: str = "1.0.0" - name: str = "MatplotlibTool" - description: str = ( - "Generates a plot using Matplotlib library based on provided configuration." 
- ) - type: Literal["MatplotlibTool"] = "MatplotlibTool" - - parameters: List[Parameter] = Field( - default_factory=lambda: [ - Parameter( - name="plot_type", - type="string", - description="Type of plot to generate (e.g., 'line', 'bar', 'scatter').", - required=True, - enum=["line", "bar", "scatter"], - ), - Parameter( - name="x_data", - type="list", - description="X-axis data for the plot.", - required=True, - ), - Parameter( - name="y_data", - type="list", - description="Y-axis data for the plot.", - required=True, - ), - Parameter( - name="title", - type="string", - description="Title of the plot.", - required=False, - default="", - ), - Parameter( - name="x_label", - type="string", - description="Label for the X-axis.", - required=False, - default="", - ), - Parameter( - name="y_label", - type="string", - description="Label for the Y-axis.", - required=False, - default="", - ), - Parameter( - name="save_path", - type="string", - description="Path to save the generated plot image.", - required=False, - default="plot.png", - ), - ] - ) - - def __call__( - self, - plot_type: str, - x_data: List[float], - y_data: List[float], - title: str = "", - x_label: str = "", - y_label: str = "", - save_path: str = "plot.png", - ): - """ - Generates a plot using Matplotlib based on provided configuration. - - Parameters: - plot_type (str): The type of the plot ('line', 'bar', 'scatter'). - x_data (List[float]): X-axis data for the plot. - y_data (List[float]): Y-axis data for the plot. - title (str): Title of the plot. - x_label (str): Label for the X-axis. - y_label (str): Label for the Y-axis. - save_path (str): Path to save the generated plot image. - - Returns: - str: Path where the plot image is saved. 
- """ - plt.figure() - - if plot_type == "line": - plt.plot(x_data, y_data) - elif plot_type == "bar": - plt.bar(x_data, y_data) - elif plot_type == "scatter": - plt.scatter(x_data, y_data) - else: - raise ValueError(f"Unsupported plot type: {plot_type}") - - if title: - plt.title(title) - if x_label: - plt.xlabel(x_label) - if y_label: - plt.ylabel(y_label) - - plt.savefig(save_path) - plt.close() - - with open(save_path, "rb") as image_file: - encoded_image = base64.b64encode(image_file.read()).decode("utf-8") - - return {"img_path": save_path, "img_base64": encoded_image, "data": []} diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/SentenceComplexityTool.py b/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/SentenceComplexityTool.py deleted file mode 100644 index 38150f666..000000000 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/SentenceComplexityTool.py +++ /dev/null @@ -1,80 +0,0 @@ -from typing import List, Literal, Dict -import nltk -from nltk.tokenize import sent_tokenize, word_tokenize -from pydantic import Field -from swarmauri_standard.tools.Parameter import Parameter -from swarmauri_base.tools.ToolBase import ToolBase -from swarmauri_core.ComponentBase import ComponentBase - -# Download required NLTK data once during module load -nltk.download("punkt", quiet=True) - - -@ComponentBase.register_type(ToolBase, "SentenceComplexityTool") -class SentenceComplexityTool(ToolBase): - version: str = "0.1.dev2" - parameters: List[Parameter] = Field( - default_factory=lambda: [ - Parameter( - name="text", - type="string", - description="The text to analyze for sentence complexity.", - required=True, - ) - ] - ) - - name: str = "SentenceComplexityTool" - description: str = "Evaluates sentence complexity based on average sentence length and the number of clauses." 
- type: Literal["SentenceComplexityTool"] = "SentenceComplexityTool" - - def __call__(self, text: str) -> Dict[str, float]: - """ - Evaluate sentence complexity based on average sentence length and the number of clauses. - - Parameters: - - text (str): The text to analyze. - - Returns: - - dict: A dictionary containing average sentence length and average number of clauses per sentence. - """ - if not text.strip(): - raise ValueError("Input text cannot be empty.") - - sentences = sent_tokenize(text) - num_sentences = len(sentences) - - if num_sentences == 0: - return {"average_sentence_length": 0.0, "average_clauses_per_sentence": 0.0} - - total_words = 0 - total_clauses = 0 - - for sentence in sentences: - words = word_tokenize(sentence) - total_words += len(words) - - # Improved clause counting method - clauses = sentence.count(",") + sentence.count(";") - clauses += sum( - sentence.lower().count(conj) - for conj in [ - "and", - "but", - "or", - "because", - "although", - "though", - "while", - "if", - ] - ) - total_clauses += clauses + 1 - - avg_sentence_length = total_words / num_sentences - avg_clauses_per_sentence = total_clauses / num_sentences - - return { - "average_sentence_length": avg_sentence_length, - "average_clauses_per_sentence": avg_clauses_per_sentence, - } diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/MinkowskiDistance_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/distances/MinkowskiDistance_unit_test.py deleted file mode 100644 index 641b8d4d3..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/distances/MinkowskiDistance_unit_test.py +++ /dev/null @@ -1,29 +0,0 @@ -import pytest -from swarmauri_standard.distances.MinkowskiDistance import MinkowskiDistance -from swarmauri_standard.vectors.Vector import Vector - - -@pytest.mark.unit -def test_ubc_resource(): - assert MinkowskiDistance().resource == "Distance" - - -@pytest.mark.unit -def test_ubc_type(): - assert MinkowskiDistance().type == 
"MinkowskiDistance" - - -@pytest.mark.unit -def test_serialization(): - distance = MinkowskiDistance() - assert ( - distance.id - == MinkowskiDistance.model_validate_json(distance.model_dump_json()).id - ) - - -@pytest.mark.unit -def test_distance(): - assert ( - MinkowskiDistance().distance(Vector(value=[1, 2]), Vector(value=[1, 2])) == 0.0 - ) diff --git a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/NmfEmbedding_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/embeddings/NmfEmbedding_unit_test.py deleted file mode 100644 index 727c782f9..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/NmfEmbedding_unit_test.py +++ /dev/null @@ -1,32 +0,0 @@ -import pytest -from swarmauri_standard.embeddings.NmfEmbedding import NmfEmbedding - - -@pytest.fixture(scope="module") -def nmf_embedder(): - return NmfEmbedding() - - -@pytest.mark.unit -def test_ubc_resource(nmf_embedder): - assert nmf_embedder.resource == "Embedding" - - -@pytest.mark.unit -def test_ubc_type(nmf_embedder): - assert nmf_embedder.type == "NmfEmbedding" - - -@pytest.mark.unit -def test_serialization(nmf_embedder): - assert ( - nmf_embedder.id - == NmfEmbedding.model_validate_json(nmf_embedder.model_dump_json()).id - ) - - -@pytest.mark.unit -def test_fit_transform(nmf_embedder): - documents = ["test", "test1", "test2"] - nmf_embedder.fit_transform(documents) - assert documents == nmf_embedder.extract_features() diff --git a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/TfidfEmbedding_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/embeddings/TfidfEmbedding_unit_test.py deleted file mode 100644 index e010724da..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/TfidfEmbedding_unit_test.py +++ /dev/null @@ -1,39 +0,0 @@ -import pytest -from swarmauri_standard.embeddings.TfidfEmbedding import TfidfEmbedding - - -@pytest.fixture(scope="module") -def tfidf_embedder(): - return TfidfEmbedding() - - -@pytest.mark.unit -def 
test_ubc_resource(tfidf_embedder): - assert tfidf_embedder.resource == "Embedding" - - -@pytest.mark.unit -def test_ubc_type(tfidf_embedder): - assert tfidf_embedder.type == "TfidfEmbedding" - - -@pytest.mark.unit -def test_serialization(tfidf_embedder): - assert ( - tfidf_embedder.id - == TfidfEmbedding.model_validate_json(tfidf_embedder.model_dump_json()).id - ) - - -@pytest.mark.unit -def test_fit_transform(tfidf_embedder): - documents = ["test", "test1", "test2"] - tfidf_embedder.fit_transform(documents) - assert documents == tfidf_embedder.extract_features() - - -@pytest.mark.unit -def test_infer_vector(tfidf_embedder): - documents = ["test", "test1", "test2"] - tfidf_embedder.fit_transform(documents) - assert tfidf_embedder.infer_vector("hi", documents).value == [1.0, 0.0, 0.0, 0.0] diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/BeautifulSoupElementParser_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/parsers/BeautifulSoupElementParser_unit_test.py deleted file mode 100644 index 175280f2d..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/parsers/BeautifulSoupElementParser_unit_test.py +++ /dev/null @@ -1,69 +0,0 @@ -import pytest -from swarmauri_standard.documents.Document import Document -from swarmauri_standard.parsers.BeautifulSoupElementParser import ( - BeautifulSoupElementParser as Parser, -) - - -@pytest.mark.unit -def test_ubc_resource(): - html_content = "

Sample HTML content

" - parser = Parser(element=html_content) - assert parser.resource == "Parser" - - -@pytest.mark.unit -def test_ubc_type(): - html_content = "

Sample HTML content

" - assert Parser(element=html_content).type == "BeautifulSoupElementParser" - - -@pytest.mark.unit -def test_initialization(): - html_content = "

Sample HTML content

" - parser = Parser(element=html_content) - assert isinstance(parser.id, str) - - -@pytest.mark.unit -def test_serialization(): - html_content = "

Sample HTML content

" - parser = Parser(element=html_content) - assert parser.id == Parser.model_validate_json(parser.model_dump_json()).id - - -@pytest.mark.parametrize( - "html_content, element, expected_count, expected_content", - [ - ( - "

First paragraph

Second paragraph

", - "p", - 2, - ["

First paragraph

", "

Second paragraph

"], - ), - ( - "
Some span content
", - "span", - 1, - ["Some span content"], - ), - ("

Header

Paragraph

", "h1", 1, ["

Header

"]), - ("
No matching tags here
", "a", 0, []), - ], -) -@pytest.mark.unit -def test_parse(html_content, element, expected_count, expected_content): - parser = Parser(element=element) - - documents = parser.parse(html_content) - - assert isinstance(documents, list), "The result should be a list." - assert len(documents) == expected_count, ( - f"Expected {expected_count} documents, got {len(documents)}." - ) - assert all(isinstance(doc, Document) for doc in documents), ( - "All items in the result should be Document instances." - ) - assert [doc.content for doc in documents] == expected_content, ( - "The content of documents does not match the expected content." - ) diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/KeywordExtractorParser_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/parsers/KeywordExtractorParser_unit_test.py deleted file mode 100644 index 353fa6dc5..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/parsers/KeywordExtractorParser_unit_test.py +++ /dev/null @@ -1,28 +0,0 @@ -import pytest -from swarmauri_standard.parsers.KeywordExtractorParser import ( - KeywordExtractorParser as Parser, -) - - -@pytest.mark.unit -def test_ubc_resource(): - parser = Parser() - assert parser.resource == "Parser" - - -@pytest.mark.unit -def test_ubc_type(): - parser = Parser() - assert parser.type == "KeywordExtractorParser" - - -@pytest.mark.unit -def test_serialization(): - parser = Parser() - assert parser.id == Parser.model_validate_json(parser.model_dump_json()).id - - -@pytest.mark.unit -def test_parse(): - assert Parser().parse("test two burgers")[2].resource == "Document" - assert Parser().parse("test two burgers")[2].content == "burgers" diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/MatplotlibCsvTool_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/tools/MatplotlibCsvTool_unit_test.py deleted file mode 100644 index 9a1e61813..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/tools/MatplotlibCsvTool_unit_test.py 
+++ /dev/null @@ -1,84 +0,0 @@ -import os -from tempfile import NamedTemporaryFile - -import pytest -from swarmauri_standard.tools.MatplotlibCsvTool import MatplotlibCsvTool as Tool - - -@pytest.mark.unit -def test_ubc_resource(): - tool = Tool() - assert tool.resource == "Tool" - - -@pytest.mark.unit -def test_ubc_type(): - assert Tool().type == "MatplotlibCsvTool" - - -@pytest.mark.unit -def test_initialization(): - tool = Tool() - assert type(tool.id) is str - - -@pytest.mark.unit -def test_serialization(): - tool = Tool() - assert tool.id == Tool.model_validate_json(tool.model_dump_json()).id - - -@pytest.mark.parametrize( - "csv_content, x_column, y_column, expected_error", - [ - ( - "x,y\n1,2\n3,4\n5,6", # CSV content - "x", # x_column - "y", # y_column - None, # No error expected - ), - ( - "a,b\n1,2\n3,4\n5,6", # CSV content - "x", # x_column - "y", # y_column - ValueError, # Error expected due to missing columns - ), - ( - "x,z\n1,2\n3,4\n5,6", # CSV content - "x", # x_column - "y", # y_column - ValueError, # Error expected due to missing y_column - ), - ], -) -@pytest.mark.unit -def test_call(csv_content, x_column, y_column, expected_error): - with NamedTemporaryFile(delete=False, suffix=".csv") as csv_file: - csv_file.write(csv_content.encode()) - csv_file_path = csv_file.name - - with NamedTemporaryFile(delete=False, suffix=".png") as output_file: - output_file_path = output_file.name - - tool = Tool() - expected_keys = {"img_path", "img_base64", "data"} - - if expected_error: - with pytest.raises(expected_error): - tool(csv_file_path, x_column, y_column, output_file_path) - else: - result = tool(csv_file_path, x_column, y_column, output_file_path) - assert isinstance(result, dict), ( - f"Expected dict, but got {type(result).__name__}" - ) - assert expected_keys.issubset(result.keys()), ( - f"Expected keys {expected_keys} but got {result.keys()}" - ) - assert isinstance(result.get("data"), list), ( - f"Expected list, but got {type(result).__name__}" - ) 
- assert os.path.exists(output_file_path) - - os.remove(csv_file_path) - if os.path.exists(output_file_path): - os.remove(output_file_path) diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/MatplotlibTool_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/tools/MatplotlibTool_unit_test.py deleted file mode 100644 index 5d95a3d45..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/tools/MatplotlibTool_unit_test.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -import pytest -from swarmauri_standard.tools.MatplotlibTool import MatplotlibTool as Tool - - -@pytest.mark.unit -def test_ubc_resource(): - tool = Tool() - assert tool.resource == "Tool" - - -@pytest.mark.unit -def test_ubc_type(): - assert Tool().type == "MatplotlibTool" - - -@pytest.mark.unit -def test_initialization(): - tool = Tool() - assert type(tool.id) is str - - -@pytest.mark.unit -def test_serialization(): - tool = Tool() - assert tool.id == Tool.model_validate_json(tool.model_dump_json()).id - - -@pytest.mark.parametrize( - "plot_type, x_data, y_data, title, x_label, y_label, save_path", - [ - ( - "line", - [1, 2, 3], - [4, 5, 6], - "Line Plot", - "X-axis", - "Y-axis", - "test_line_plot.png", - ), - ( - "bar", - [1, 2, 3], - [4, 5, 6], - "Bar Plot", - "X-axis", - "Y-axis", - "test_bar_plot.png", - ), - ( - "scatter", - [1, 2, 3], - [4, 5, 6], - "Scatter Plot", - "X-axis", - "Y-axis", - "test_scatter_plot.png", - ), - ], -) -@pytest.mark.unit -def test_call(plot_type, x_data, y_data, title, x_label, y_label, save_path): - tool = Tool() - expected_keys = {"img_path", "img_base64", "data"} - - result = tool(plot_type, x_data, y_data, title, x_label, y_label, save_path) - - assert isinstance(result, dict), f"Expected dict, but got {type(result).__name__}" - assert expected_keys.issubset(result.keys()), ( - f"Expected keys {expected_keys} but got {result.keys()}" - ) - assert isinstance(result.get("data"), list), ( - f"Expected list, but got {type(result).__name__}" - ) - 
assert os.path.exists(save_path) - - os.remove(save_path) diff --git a/pkgs/standards/swarmauri_standard/tests/unit/vector_stores/SqliteVectorStore_unit_test.py b/pkgs/standards/swarmauri_standard/tests/unit/vector_stores/SqliteVectorStore_unit_test.py deleted file mode 100644 index 0dec25d51..000000000 --- a/pkgs/standards/swarmauri_standard/tests/unit/vector_stores/SqliteVectorStore_unit_test.py +++ /dev/null @@ -1,70 +0,0 @@ -import pytest -import tempfile -import os -from swarmauri_standard.documents.Document import Document -from swarmauri_standard.vector_stores.SqliteVectorStore import ( - SqliteVectorStore, -) - - -@pytest.fixture -def sqlite_db(): - # Create a temporary file that will act as the SQLite database - with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as temp_db: - db_path = temp_db.name - yield db_path - # Clean up the temporary database file after the test - os.remove(db_path) - - -@pytest.mark.unit -def test_ubc_resource(sqlite_db): - vs = SqliteVectorStore(db_path=sqlite_db) - assert vs.resource == "VectorStore" - # assert vs.embedder.resource == "Embedding" - - -@pytest.mark.unit -def test_ubc_type(sqlite_db): - vs = SqliteVectorStore(db_path=sqlite_db) - assert vs.type == "SqliteVectorStore" - - -@pytest.mark.unit -def test_serialization(sqlite_db): - vs = SqliteVectorStore(db_path=sqlite_db) - assert vs.id == SqliteVectorStore.model_validate_json(vs.model_dump_json()).id - - -@pytest.mark.unit -def test_top_k(sqlite_db): - vs = SqliteVectorStore(db_path=sqlite_db) - documents = [ - Document( - id="1", - content="test", - metadata={}, - embedding={"value": [0.1, 0.2, 0.3]}, - ), - Document( - id="2", - content="test1", - metadata={}, - embedding={"value": [0.4, 0.5, 0.6]}, - ), - Document( - id="3", - content="test2", - metadata={}, - embedding={"value": [0.7, 0.8, 0.9]}, - ), - Document( - id="4", - content="test3", - metadata={}, - embedding={"value": [0.1, 0.2, 0.2]}, - ), - ] - - vs.add_documents(documents) - assert 
len(vs.retrieve(query_vector=[0.1, 0.2, 0.25], top_k=2)) == 2 diff --git a/pkgs/standards/swarmauri_tool_matplotlib/pyproject.toml b/pkgs/standards/swarmauri_tool_matplotlib/pyproject.toml index 3b1f47757..72fbc7d5d 100644 --- a/pkgs/standards/swarmauri_tool_matplotlib/pyproject.toml +++ b/pkgs/standards/swarmauri_tool_matplotlib/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_tool_matplotlib" -version = "0.6.1.dev9" +version = "0.6.1" description = "Matplotlib tool for Swarmauri." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } # Dependencies matplotlib = ">=3.9.2" diff --git a/pkgs/standards/swarmauri_vectorstore_doc2vec/pyproject.toml b/pkgs/standards/swarmauri_vectorstore_doc2vec/pyproject.toml index a66d2d2b2..cbc02684a 100644 --- a/pkgs/standards/swarmauri_vectorstore_doc2vec/pyproject.toml +++ b/pkgs/standards/swarmauri_vectorstore_doc2vec/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_doc2vec" -version = "0.6.1.dev9" +version = "0.6.1" description = "A Doc2Vec based Vector Store and Doc2Vec Based Embedding Model." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -21,7 +21,7 @@ python = ">=3.10,<3.13" swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} swarmauri_embedding_doc2vec = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_doc2vec"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } [tool.poetry.group.dev.dependencies] flake8 = "^7.0" diff --git a/pkgs/standards/swarmauri_vectorstore_tfidf/pyproject.toml b/pkgs/standards/swarmauri_vectorstore_tfidf/pyproject.toml index 9dacd09b1..e7e5f3414 100644 --- a/pkgs/standards/swarmauri_vectorstore_tfidf/pyproject.toml +++ b/pkgs/standards/swarmauri_vectorstore_tfidf/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri_vectorstore_tfidf" -version = "0.6.1.dev9" +version = "0.6.1" description = "A Tfidf based Vector Store and Tfidf Based Embedding Model." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -19,7 +19,7 @@ python = ">=3.10,<3.13" # Swarmauri swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } swarmauri_embedding_tfidf = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_embedding_tfidf"} [tool.poetry.group.dev.dependencies] diff --git a/pkgs/standards/swm_example_package/pyproject.toml b/pkgs/standards/swm_example_package/pyproject.toml index 4ed6451de..4c5c2aa08 100644 --- a/pkgs/standards/swm_example_package/pyproject.toml +++ b/pkgs/standards/swm_example_package/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swm-example-package" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes an example of a First Class Swarmauri Example." authors = ["Jacob Stewart "] license = "Apache-2.0" diff --git a/pkgs/swarmauri/pyproject.toml b/pkgs/swarmauri/pyproject.toml index a28f5744d..dbd8e6bff 100644 --- a/pkgs/swarmauri/pyproject.toml +++ b/pkgs/swarmauri/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri" -version = "0.6.1.dev9" +version = "0.6.1" description = "Namespace package for components, packages, and plugins within the Swarmauri framework." 
authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -17,7 +17,7 @@ classifiers = [ python = ">=3.10,<3.13" swarmauri_core = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/base"} -swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/standards/swarmauri_standard"} +swarmauri_standard = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/swarmauri_standard" } toml = {version = "^0.10.2"} httpx = "^0.27.0" diff --git a/pkgs/swarmauri/swarmauri/interface_registry.py b/pkgs/swarmauri/swarmauri/interface_registry.py index 42098d3b5..a8c8db5a7 100644 --- a/pkgs/swarmauri/swarmauri/interface_registry.py +++ b/pkgs/swarmauri/swarmauri/interface_registry.py @@ -67,6 +67,7 @@ class InterfaceRegistry: "swarmauri.control_panels": ControlPanelBase, "swarmauri.conversations": ConversationBase, "swarmauri.dataconnectors": DataConnectorBase, + "swarmauri.decorators": None, "swarmauri.distances": DistanceBase, "swarmauri.documents": DocumentBase, "swarmauri.embeddings": EmbeddingBase, diff --git a/pkgs/swarmauri/swarmauri/plugin_citizenship_registry.py b/pkgs/swarmauri/swarmauri/plugin_citizenship_registry.py index 5d9a4f8e0..55104a650 100644 --- a/pkgs/swarmauri/swarmauri/plugin_citizenship_registry.py +++ b/pkgs/swarmauri/swarmauri/plugin_citizenship_registry.py @@ -77,7 +77,20 @@ class PluginCitizenshipRegistry: # "swarmauri.distances.MinkowskiDistance": "swarmauri_standard.distances.MinkowskiDistance", "swarmauri.distances.SorensenDiceDistance": "swarmauri_standard.distances.SorensenDiceDistance", "swarmauri.distances.SquaredEuclideanDistance": "swarmauri_standard.distances.SquaredEuclideanDistance", + ### + # decorators + ### + "swarmauri.decorators.maybe_async": 
"swarmauri.decorators.maybe_async", + "swarmauri.decorators.tool_decorator": "swarmauri.decorators.tool_decorator", + "swarmauri.decorators.retry_on_status_codes": "swarmauri.decorators.retry_on_status_codes", + "swarmauri.decorators.deprecate": "swarmauri.decorators.deprecate", + ### + # documents + ### "swarmauri.documents.Document": "swarmauri_standard.documents.Document", + ### + # embeddings + ### "swarmauri.embeddings.CohereEmbedding": "swarmauri_standard.embeddings.CohereEmbedding", "swarmauri.embeddings.GeminiEmbedding": "swarmauri_standard.embeddings.GeminiEmbedding", "swarmauri.embeddings.MistralEmbedding": "swarmauri_standard.embeddings.MistralEmbedding", @@ -93,6 +106,9 @@ class PluginCitizenshipRegistry: "swarmauri.image_gens.FalAIImgGenModel": "swarmauri_standard.image_gens.FalAIImgGenModel", "swarmauri.image_gens.HyperbolicImgGenModel": "swarmauri_standard.image_gens.HyperbolicImgGenModel", "swarmauri.image_gens.OpenAIImgGenModel": "swarmauri_standard.image_gens.OpenAIImgGenModel", + ### + # LLMS + ## "swarmauri.llms.AI21StudioModel": "swarmauri_standard.llms.AI21StudioModel", "swarmauri.llms.AnthropicModel": "swarmauri_standard.llms.AnthropicModel", "swarmauri.llms.AnthropicToolModel": "swarmauri_standard.llms.AnthropicToolModel", @@ -110,6 +126,7 @@ class PluginCitizenshipRegistry: "swarmauri.llms.HyperbolicAudioTTS": "swarmauri_standard.llms.HyperbolicAudioTTS", "swarmauri.llms.HyperbolicModel": "swarmauri_standard.llms.HyperbolicModel", "swarmauri.llms.HyperbolicVisionModel": "swarmauri_standard.llms.HyperbolicVisionModel", + "swarmauri.llms.LlamaCppModel": "swarmauri_standard.llms.LlamaCppModel", "swarmauri.llms.MistralModel": "swarmauri_standard.llms.MistralModel", "swarmauri.llms.MistralToolModel": "swarmauri_standard.llms.MistralToolModel", "swarmauri.llms.OpenAIAudio": "swarmauri_standard.llms.OpenAIAudio", @@ -119,6 +136,9 @@ class PluginCitizenshipRegistry: "swarmauri.llms.PerplexityModel": "swarmauri_standard.llms.PerplexityModel", 
"swarmauri.llms.PlayHTModel": "swarmauri_standard.llms.PlayHTModel", "swarmauri.llms.WhisperLargeModel": "swarmauri_standard.llms.WhisperLargeModel", + ### + # Tool LLMS + ### "swarmauri.tool_llms.OpenAIToolModel": "swarmauri_standard.tool_llms.OpenAIToolModel", "swarmauri.tool_llms.AnthropicToolModel": "swarmauri_standard.tool_llms.AnthropicToolModel", "swarmauri.tool_llms.CohereToolModel": "swarmauri_standard.tool_llms.CohereToolModel", @@ -204,6 +224,9 @@ class PluginCitizenshipRegistry: "swarmauri.tracing.TracedVariable": "swarmauri_standard.tracing.TracedVariable", "swarmauri.tracing.VariableTracer": "swarmauri_standard.tracing.VariableTracer", "swarmauri.transports.PubSubTransport": "swarmauri_standard.transports.PubSubTransport", + ### + # Utils + ## "swarmauri.utils.LazyLoader": "swarmauri_standard.utils.LazyLoader", "swarmauri.utils._get_subclasses": "swarmauri_standard.utils._get_subclasses", "swarmauri.utils._lazy_import": "swarmauri_standard.utils._lazy_import", @@ -233,6 +256,9 @@ class PluginCitizenshipRegistry: "swarmauri.utils.retry_decorator": "swarmauri_standard.utils.retry_decorator", "swarmauri.utils.sql_log": "swarmauri_standard.utils.sql_log", "swarmauri.utils.timeout_wrapper": "swarmauri_standard.utils.timeout_wrapper", + ### + # Vector Stores + ### "swarmauri.vector_stores.SqliteVectorStore": "swarmauri_standard.vector_stores.SqliteVectorStore", # "swarmauri.vector_stores.TfidfVectorStore": "swarmauri_standard.vector_stores.TfidfVectorStore", "swarmauri.vectors.Vector": "swarmauri_standard.vectors.Vector", diff --git a/pkgs/swarmauri_standard/LICENSE b/pkgs/swarmauri_standard/LICENSE new file mode 100644 index 000000000..b7b70230d --- /dev/null +++ b/pkgs/swarmauri_standard/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2025] [Jacob Stewart @ Swarmauri] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkgs/standards/swarmauri_standard/README.md b/pkgs/swarmauri_standard/README.md similarity index 100% rename from pkgs/standards/swarmauri_standard/README.md rename to pkgs/swarmauri_standard/README.md diff --git a/pkgs/swarmauri_standard/poetry.lock b/pkgs/swarmauri_standard/poetry.lock new file mode 100644 index 000000000..a27c53eb7 --- /dev/null +++ b/pkgs/swarmauri_standard/poetry.lock @@ -0,0 +1,2393 @@ +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.8.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "asttokens" 
+version = "3.0.0" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, +] + +[package.extras] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "attrs" +version = "25.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and 
python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + +[[package]] +name = "beautifulsoup4" +version = "4.13.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, + {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, +] + +[package.dependencies] +soupsieve = ">1.2" +typing-extensions = ">=4.0.0" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "certifi" +version = "2025.1.31" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = 
"charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + 
{file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = 
"charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = 
"sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} + +[[package]] +name = "contourpy" +version = "1.3.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, + {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2f926efda994cdf3c8d3fdb40b9962f86edbc4457e739277b961eced3d0b4c1"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adce39d67c0edf383647a3a007de0a45fd1b08dedaa5318404f1a73059c2512b"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abbb49fb7dac584e5abc6636b7b2a7227111c4f771005853e7d25176daaf8453"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a0cffcbede75c059f535725c1680dfb17b6ba8753f0c74b14e6a9c68c29d7ea3"}, + {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab29962927945d89d9b293eabd0d59aea28d887d4f3be6c22deaefbb938a7277"}, + {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:974d8145f8ca354498005b5b981165b74a195abfae9a8129df3e56771961d595"}, + {file = "contourpy-1.3.1-cp310-cp310-win32.whl", hash = "sha256:ac4578ac281983f63b400f7fe6c101bedc10651650eef012be1ccffcbacf3697"}, + {file = "contourpy-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:174e758c66bbc1c8576992cec9599ce8b6672b741b5d336b5c74e35ac382b18e"}, + {file = "contourpy-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8b974d8db2c5610fb4e76307e265de0edb655ae8169e8b21f41807ccbeec4b"}, + {file = "contourpy-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20914c8c973f41456337652a6eeca26d2148aa96dd7ac323b74516988bea89fc"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d40d37c1c3a4961b4619dd9d77b12124a453cc3d02bb31a07d58ef684d3d86"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:113231fe3825ebf6f15eaa8bc1f5b0ddc19d42b733345eae0934cb291beb88b6"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dbbc03a40f916a8420e420d63e96a1258d3d1b58cbdfd8d1f07b49fcbd38e85"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a04ecd68acbd77fa2d39723ceca4c3197cb2969633836ced1bea14e219d077c"}, + {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c414fc1ed8ee1dbd5da626cf3710c6013d3d27456651d156711fa24f24bd1291"}, + {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:31c1b55c1f34f80557d3830d3dd93ba722ce7e33a0b472cba0ec3b6535684d8f"}, + {file = "contourpy-1.3.1-cp311-cp311-win32.whl", hash = 
"sha256:f611e628ef06670df83fce17805c344710ca5cde01edfdc72751311da8585375"}, + {file = "contourpy-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b2bdca22a27e35f16794cf585832e542123296b4687f9fd96822db6bae17bfc9"}, + {file = "contourpy-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ffa84be8e0bd33410b17189f7164c3589c229ce5db85798076a3fa136d0e509"}, + {file = "contourpy-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805617228ba7e2cbbfb6c503858e626ab528ac2a32a04a2fe88ffaf6b02c32bc"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade08d343436a94e633db932e7e8407fe7de8083967962b46bdfc1b0ced39454"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47734d7073fb4590b4a40122b35917cd77be5722d80683b249dac1de266aac80"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ba94a401342fc0f8b948e57d977557fbf4d515f03c67682dd5c6191cb2d16ec"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa874e87e4a647fd2e4f514d5e91c7d493697127beb95e77d2f7561f6905bd9"}, + {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf98051f1045b15c87868dbaea84f92408337d4f81d0e449ee41920ea121d3b"}, + {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61332c87493b00091423e747ea78200659dc09bdf7fd69edd5e98cef5d3e9a8d"}, + {file = "contourpy-1.3.1-cp312-cp312-win32.whl", hash = "sha256:e914a8cb05ce5c809dd0fe350cfbb4e881bde5e2a38dc04e3afe1b3e58bd158e"}, + {file = "contourpy-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:08d9d449a61cf53033612cb368f3a1b26cd7835d9b8cd326647efe43bca7568d"}, + {file = "contourpy-1.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a761d9ccfc5e2ecd1bf05534eda382aa14c3e4f9205ba5b1684ecfe400716ef2"}, + {file = "contourpy-1.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:523a8ee12edfa36f6d2a49407f705a6ef4c5098de4f498619787e272de93f2d5"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece6df05e2c41bd46776fbc712e0996f7c94e0d0543af1656956d150c4ca7c81"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:573abb30e0e05bf31ed067d2f82500ecfdaec15627a59d63ea2d95714790f5c2"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fa36448e6a3a1a9a2ba23c02012c43ed88905ec80163f2ffe2421c7192a5d7"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ea9924d28fc5586bf0b42d15f590b10c224117e74409dd7a0be3b62b74a501c"}, + {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b75aa69cb4d6f137b36f7eb2ace9280cfb60c55dc5f61c731fdf6f037f958a3"}, + {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:041b640d4ec01922083645a94bb3b2e777e6b626788f4095cf21abbe266413c1"}, + {file = "contourpy-1.3.1-cp313-cp313-win32.whl", hash = "sha256:36987a15e8ace5f58d4d5da9dca82d498c2bbb28dff6e5d04fbfcc35a9cb3a82"}, + {file = "contourpy-1.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7895f46d47671fa7ceec40f31fae721da51ad34bdca0bee83e38870b1f47ffd"}, + {file = "contourpy-1.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ddeb796389dadcd884c7eb07bd14ef12408aaae358f0e2ae24114d797eede30"}, + {file = "contourpy-1.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19c1555a6801c2f084c7ddc1c6e11f02eb6a6016ca1318dd5452ba3f613a1751"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841ad858cff65c2c04bf93875e384ccb82b654574a6d7f30453a04f04af71342"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4318af1c925fb9a4fb190559ef3eec206845f63e80fb603d47f2d6d67683901c"}, + {file = 
"contourpy-1.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14c102b0eab282427b662cb590f2e9340a9d91a1c297f48729431f2dcd16e14f"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e806338bfeaa006acbdeba0ad681a10be63b26e1b17317bfac3c5d98f36cda"}, + {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4d76d5993a34ef3df5181ba3c92fabb93f1eaa5729504fb03423fcd9f3177242"}, + {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:89785bb2a1980c1bd87f0cb1517a71cde374776a5f150936b82580ae6ead44a1"}, + {file = "contourpy-1.3.1-cp313-cp313t-win32.whl", hash = "sha256:8eb96e79b9f3dcadbad2a3891672f81cdcab7f95b27f28f1c67d75f045b6b4f1"}, + {file = "contourpy-1.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:287ccc248c9e0d0566934e7d606201abd74761b5703d804ff3df8935f523d546"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b457d6430833cee8e4b8e9b6f07aa1c161e5e0d52e118dc102c8f9bd7dd060d6"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb76c1a154b83991a3cbbf0dfeb26ec2833ad56f95540b442c73950af2013750"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:44a29502ca9c7b5ba389e620d44f2fbe792b1fb5734e8b931ad307071ec58c53"}, + {file = "contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699"}, +] + +[package.dependencies] +numpy = ">=1.23" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +groups = ["dev"] +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + +[[package]] +name = "executing" +version = "2.2.0" +description = "Get the currently 
executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, + {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] + +[[package]] +name = "flake8" +version = "7.1.2" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +groups = ["dev"] +files = [ + {file = "flake8-7.1.2-py2.py3-none-any.whl", hash = "sha256:1cbc62e65536f65e6d754dfe6f1bada7f5cf392d6f5db3c2b85892466c3e7c1a"}, + {file = "flake8-7.1.2.tar.gz", hash = "sha256:c586ffd0b41540951ae41af572e6790dbd49fc12b3aa2541685d253d9bd504bd"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "fonttools" +version = "4.56.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:331954d002dbf5e704c7f3756028e21db07097c19722569983ba4d74df014000"}, + {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d1613abd5af2f93c05867b3a3759a56e8bf97eb79b1da76b2bc10892f96ff16"}, + {file = "fonttools-4.56.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:705837eae384fe21cee5e5746fd4f4b2f06f87544fa60f60740007e0aa600311"}, + {file = "fonttools-4.56.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc871904a53a9d4d908673c6faa15689874af1c7c5ac403a8e12d967ebd0c0dc"}, + {file = 
"fonttools-4.56.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:38b947de71748bab150259ee05a775e8a0635891568e9fdb3cdd7d0e0004e62f"}, + {file = "fonttools-4.56.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:86b2a1013ef7a64d2e94606632683f07712045ed86d937c11ef4dde97319c086"}, + {file = "fonttools-4.56.0-cp310-cp310-win32.whl", hash = "sha256:133bedb9a5c6376ad43e6518b7e2cd2f866a05b1998f14842631d5feb36b5786"}, + {file = "fonttools-4.56.0-cp310-cp310-win_amd64.whl", hash = "sha256:17f39313b649037f6c800209984a11fc256a6137cbe5487091c6c7187cae4685"}, + {file = "fonttools-4.56.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ef04bc7827adb7532be3d14462390dd71287644516af3f1e67f1e6ff9c6d6df"}, + {file = "fonttools-4.56.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ffda9b8cd9cb8b301cae2602ec62375b59e2e2108a117746f12215145e3f786c"}, + {file = "fonttools-4.56.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e993e8db36306cc3f1734edc8ea67906c55f98683d6fd34c3fc5593fdbba4c"}, + {file = "fonttools-4.56.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:003548eadd674175510773f73fb2060bb46adb77c94854af3e0cc5bc70260049"}, + {file = "fonttools-4.56.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd9825822e7bb243f285013e653f6741954d8147427aaa0324a862cdbf4cbf62"}, + {file = "fonttools-4.56.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b23d30a2c0b992fb1c4f8ac9bfde44b5586d23457759b6cf9a787f1a35179ee0"}, + {file = "fonttools-4.56.0-cp311-cp311-win32.whl", hash = "sha256:47b5e4680002ae1756d3ae3b6114e20aaee6cc5c69d1e5911f5ffffd3ee46c6b"}, + {file = "fonttools-4.56.0-cp311-cp311-win_amd64.whl", hash = "sha256:14a3e3e6b211660db54ca1ef7006401e4a694e53ffd4553ab9bc87ead01d0f05"}, + {file = "fonttools-4.56.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6f195c14c01bd057bc9b4f70756b510e009c83c5ea67b25ced3e2c38e6ee6e9"}, + {file = 
"fonttools-4.56.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fa760e5fe8b50cbc2d71884a1eff2ed2b95a005f02dda2fa431560db0ddd927f"}, + {file = "fonttools-4.56.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d54a45d30251f1d729e69e5b675f9a08b7da413391a1227781e2a297fa37f6d2"}, + {file = "fonttools-4.56.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:661a8995d11e6e4914a44ca7d52d1286e2d9b154f685a4d1f69add8418961563"}, + {file = "fonttools-4.56.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d94449ad0a5f2a8bf5d2f8d71d65088aee48adbe45f3c5f8e00e3ad861ed81a"}, + {file = "fonttools-4.56.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f59746f7953f69cc3290ce2f971ab01056e55ddd0fb8b792c31a8acd7fee2d28"}, + {file = "fonttools-4.56.0-cp312-cp312-win32.whl", hash = "sha256:bce60f9a977c9d3d51de475af3f3581d9b36952e1f8fc19a1f2254f1dda7ce9c"}, + {file = "fonttools-4.56.0-cp312-cp312-win_amd64.whl", hash = "sha256:300c310bb725b2bdb4f5fc7e148e190bd69f01925c7ab437b9c0ca3e1c7cd9ba"}, + {file = "fonttools-4.56.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f20e2c0dfab82983a90f3d00703ac0960412036153e5023eed2b4641d7d5e692"}, + {file = "fonttools-4.56.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f36a0868f47b7566237640c026c65a86d09a3d9ca5df1cd039e30a1da73098a0"}, + {file = "fonttools-4.56.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62b4c6802fa28e14dba010e75190e0e6228513573f1eeae57b11aa1a39b7e5b1"}, + {file = "fonttools-4.56.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a05d1f07eb0a7d755fbe01fee1fd255c3a4d3730130cf1bfefb682d18fd2fcea"}, + {file = "fonttools-4.56.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0073b62c3438cf0058488c002ea90489e8801d3a7af5ce5f7c05c105bee815c3"}, + {file = 
"fonttools-4.56.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cad98c94833465bcf28f51c248aaf07ca022efc6a3eba750ad9c1e0256d278"}, + {file = "fonttools-4.56.0-cp313-cp313-win32.whl", hash = "sha256:d0cb73ccf7f6d7ca8d0bc7ea8ac0a5b84969a41c56ac3ac3422a24df2680546f"}, + {file = "fonttools-4.56.0-cp313-cp313-win_amd64.whl", hash = "sha256:62cc1253827d1e500fde9dbe981219fea4eb000fd63402283472d38e7d8aa1c6"}, + {file = "fonttools-4.56.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3fd3fccb7b9adaaecfa79ad51b759f2123e1aba97f857936ce044d4f029abd71"}, + {file = "fonttools-4.56.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:193b86e9f769320bc98ffdb42accafb5d0c8c49bd62884f1c0702bc598b3f0a2"}, + {file = "fonttools-4.56.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e81c1cc80c1d8bf071356cc3e0e25071fbba1c75afc48d41b26048980b3c771"}, + {file = "fonttools-4.56.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9270505a19361e81eecdbc2c251ad1e1a9a9c2ad75fa022ccdee533f55535dc"}, + {file = "fonttools-4.56.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53f5e9767978a4daf46f28e09dbeb7d010319924ae622f7b56174b777258e5ba"}, + {file = "fonttools-4.56.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9da650cb29bc098b8cfd15ef09009c914b35c7986c8fa9f08b51108b7bc393b4"}, + {file = "fonttools-4.56.0-cp38-cp38-win32.whl", hash = "sha256:965d0209e6dbdb9416100123b6709cb13f5232e2d52d17ed37f9df0cc31e2b35"}, + {file = "fonttools-4.56.0-cp38-cp38-win_amd64.whl", hash = "sha256:654ac4583e2d7c62aebc6fc6a4c6736f078f50300e18aa105d87ce8925cfac31"}, + {file = "fonttools-4.56.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca7962e8e5fc047cc4e59389959843aafbf7445b6c08c20d883e60ced46370a5"}, + {file = "fonttools-4.56.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1af375734018951c31c0737d04a9d5fd0a353a0253db5fbed2ccd44eac62d8c"}, + {file = 
"fonttools-4.56.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:442ad4122468d0e47d83bc59d0e91b474593a8c813839e1872e47c7a0cb53b10"}, + {file = "fonttools-4.56.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf4f8d2a30b454ac682e12c61831dcb174950c406011418e739de592bbf8f76"}, + {file = "fonttools-4.56.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96a4271f63a615bcb902b9f56de00ea225d6896052c49f20d0c91e9f43529a29"}, + {file = "fonttools-4.56.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6c1d38642ca2dddc7ae992ef5d026e5061a84f10ff2b906be5680ab089f55bb8"}, + {file = "fonttools-4.56.0-cp39-cp39-win32.whl", hash = "sha256:2d351275f73ebdd81dd5b09a8b8dac7a30f29a279d41e1c1192aedf1b6dced40"}, + {file = "fonttools-4.56.0-cp39-cp39-win_amd64.whl", hash = "sha256:d6ca96d1b61a707ba01a43318c9c40aaf11a5a568d1e61146fafa6ab20890793"}, + {file = "fonttools-4.56.0-py3-none-any.whl", hash = "sha256:1088182f68c303b50ca4dc0c82d42083d176cba37af1937e1a976a31149d4d14"}, + {file = "fonttools-4.56.0.tar.gz", hash = "sha256:a114d1567e1a1586b7e9e7fc2ff686ca542a82769a296cef131e4c4af51e58f4"}, +] + +[package.extras] +all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] 
+type1 = ["xattr ; sys_platform == \"darwin\""] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""] +woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.7" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipython" +version = "8.32.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "ipython-8.32.0-py3-none-any.whl", hash = 
"sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa"}, + {file = "ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack_data = "*" +traitlets = ">=5.13.0" +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "jedi" +version = "0.19.2" +description = "An autocompletion tool for Python that can be used for text editors." 
+optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, +] + +[package.dependencies] +parso = ">=0.8.4,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] + +[[package]] +name = "jellyfish" +version = "1.1.3" +description = "Approximate and phonetic matching of strings." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jellyfish-1.1.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e73a1905eddb66d9bd383531f0ae5f21b95c891ea5ecd6d9e056310ed76ce126"}, + {file = "jellyfish-1.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e49d34c7114c08def593da522ada324b1481d8baf02184a4c8e1a32604897a41"}, + {file = "jellyfish-1.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c14c5f5f7e1f1248923498dcaaf2534b43044708b21128ac80f0cb862beaf780"}, + {file = "jellyfish-1.1.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f57af6adb802e7206c85c060597a72297eefcf60471107fbfe96b48dd1f7500b"}, + {file = "jellyfish-1.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db429ecb0d2cd4e83c4082ec36cb2ae4aa251226d6c24948b6dc042aaf149888"}, + {file = "jellyfish-1.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:38962cda1a71da0fea76350566b852062fedb06ca3a778eab9aa2a562231304a"}, + {file = "jellyfish-1.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:39c889621269087e01bace9f844c164dc1637c437f1cda899f52720a560f7cc2"}, + {file = "jellyfish-1.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:273bb289e9675e13bfd48db290e30f65582d6e6c3865e2b76d37d3fea40a1d2d"}, + {file = "jellyfish-1.1.3-cp310-cp310-win32.whl", hash = "sha256:21ae3d7b38e9fb76880c0cc24204db6f47e877624acc9d66a1456d7b6b6b6100"}, + {file = "jellyfish-1.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:ff04aaf51b1c8fc42d8f0bd30545f7af0b516f8890e61a8e8b31218a28fec2de"}, + {file = "jellyfish-1.1.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:429e82b6527a0743358e69f10f7a913ec7c61d26bf21fe2cc2974b63dddb3384"}, + {file = "jellyfish-1.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2f1a39d2781713dea8e7402a2085c8a8f0f1b7872daf1ebfd84fde80130a20e"}, + {file = "jellyfish-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:630618c5a36f40ef43b8e18cef98f40b2ec60661cb45308eb35a3950e1faaf58"}, + {file = "jellyfish-1.1.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b1094274c11bfede3284e960027d2534145335e51795e043b0d8fd71c91736a"}, + {file = "jellyfish-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5398d872fb1588ac0fbf03f4e2137aeac15798b8e7e0f6db38b791360d5e259c"}, + {file = "jellyfish-1.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:db18014834731d17490d8a27be19e75ae2e3ff7d5f40792b989e915b0c2bda9d"}, + {file = "jellyfish-1.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:08b6be37d5e99ba6e82cb45ca6e90d1fc10a6f3d7df7d3c481d1fedae8057c39"}, + {file = "jellyfish-1.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bcd4ada313160333f5a7e482baa3645fd44894db8034396c46e13f729cce327f"}, + {file = "jellyfish-1.1.3-cp311-cp311-win32.whl", hash = "sha256:388abeebce8e85a2671b14d3e6fd257db470c058a446e54273495f04ac3d8b8e"}, + {file = "jellyfish-1.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:41410d38726eac341943ff9b7505d58dbf341b50d1de5f6067323fa72b8fc13d"}, + {file = "jellyfish-1.1.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:89d5ccaf5adade69a4f80859f0fa13810064fe35aed57f42b210f2b30c3fbb6a"}, + {file = "jellyfish-1.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:29aa2be83c3346e1cc8d84b765ac4f2d4e7830e8ddb8369caf7c8e7d4c2c58fe"}, + {file = "jellyfish-1.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38902e5b5a5015a5add877b9317bc8aeccee4590fbd955016a43f9147d9a1afd"}, + {file = "jellyfish-1.1.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1c2c11ba786d1dbf5670994ba2aa0cef09a61955e298a5acb1ab3ef9e7a88d3"}, + {file = "jellyfish-1.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b098603b3883cc9f38f121c872ba0cc4600d9f92020c5ddcb8f4fe84d941aa5f"}, + {file = 
"jellyfish-1.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc3460dc0470ba277c87e6fdd3febcf052219253de748b27150884574e7179ec"}, + {file = "jellyfish-1.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7847238733fe9b62894270a8adeb50b0f28f79bef2ff78f8d39cfc161507d584"}, + {file = "jellyfish-1.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d07f99fa95624049390eac38604edfe48aa2da7fbec96ce74a7dfaa97faf2bd"}, + {file = "jellyfish-1.1.3-cp312-cp312-win32.whl", hash = "sha256:c98eec90d704c34f5309f65793d84edd029bc2b641dd03c3b90c2cfe00c22a4e"}, + {file = "jellyfish-1.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:c8c70b1cc92ee15031db16db0d2ca7d5841744f5f626a35c29c3dd7b4ea7002b"}, + {file = "jellyfish-1.1.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a1907b0382b45650a221fd6cc85afed9f75f08063e9b5c3a7a616bf95fef91be"}, + {file = "jellyfish-1.1.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c0946909f84a1059d241fcee01032840e862746547390bfb0a4cf2f59a387ee"}, + {file = "jellyfish-1.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2105dfb2387a203f77a49cf12d38b4157809377c3a069d8c419c3d331c181816"}, + {file = "jellyfish-1.1.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f852aa6753f5fbc6044e7d961df8015adbe68a5dd0d65ebef8c2ed24c0c35d13"}, + {file = "jellyfish-1.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3afb5e8f327675be57f2121fb7442305c919a07317dc35fa74f6d05fe6f221e7"}, + {file = "jellyfish-1.1.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:93c16994e01b772d3d8049c68c5c937f7a810b39a851c15b528379f0258c54d9"}, + {file = "jellyfish-1.1.3-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:e6d61f1dac3d3b2121b70159a4b2cbcff92834d006e69961cf9bbb1841461d00"}, + {file = "jellyfish-1.1.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:5ad5e8156b58caf2e80020ac54a84eb4066ff082362b27cd7af7fa732a7181d6"}, + {file = 
"jellyfish-1.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:2113195a48ce8cb99d2bb2c6d9b119f58025dde1d727101518e7150c093a66da"}, + {file = "jellyfish-1.1.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:54eaa16afed12ab166719638b285b3713ce4d039de5d1a69b5a068f845f0aa45"}, + {file = "jellyfish-1.1.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8eeb078582b56f458cccf66fc195287adec3957c62e669c8d0a17346218c3e67"}, + {file = "jellyfish-1.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:287c12adfcf75e86c83aed3a1aeef4c17b251bb3767fb2167163652beb88b5a1"}, + {file = "jellyfish-1.1.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa8be98de027f9b1cd72e8aee7801e340e2603efd03f509ec2fe084ad9df20b4"}, + {file = "jellyfish-1.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2746ee8dfb4f9dcf24a30cfdf9f392e32bf579b8e3fc3e1e38fd5ac5be421897"}, + {file = "jellyfish-1.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b15b60a8b6386c00bbd22e9253d046949c7803d8410c7b194a779f18d8ee6e34"}, + {file = "jellyfish-1.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:196d08e0ecf903904e7b043bc7192e4533976a1c1bc3b50d44e0fb09291e52e2"}, + {file = "jellyfish-1.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f6497cb2e976c0336860e865e61d0ca0f37651dcc72509326783dbcaddfb6f2c"}, + {file = "jellyfish-1.1.3-cp38-cp38-win32.whl", hash = "sha256:aec12e181aefcfe0c6e634e26bdde05b22c47c99b042dd49432c9346c17f316e"}, + {file = "jellyfish-1.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:ed6e8237ac7987daf8df5db6ad6b72b3fe00c38beb291f03122328d9fcf2eccc"}, + {file = "jellyfish-1.1.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2382a385e47e214beacd63049d7cc197472b0b3c9011e4a49410c2a843c6e2c7"}, + {file = "jellyfish-1.1.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e87d7f3db29b5893c7db869196837b39ab096f9a27bb382a56f9b8fb604d2f1f"}, + {file = 
"jellyfish-1.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7d73bcd96b15550603517ac0ad11475fc21cb431582b6698464247f0dfba666"}, + {file = "jellyfish-1.1.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3335c7b7ff947f24f6cd7bc60b40ca9e93f9a2308b7f90c390ca75a6da081b97"}, + {file = "jellyfish-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d09f407e0f88142167b45e2ffff386b6c7d8bf8ad34ec30c14cb33cce26535"}, + {file = "jellyfish-1.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:065e60adae1bfb6a0706015892585d37a26bf5708076d19216888aeca1a7e43d"}, + {file = "jellyfish-1.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8743490c0c1dc00d3b7d211ce3b2150ac4568e891bfdeb61b0a91c5dc5765826"}, + {file = "jellyfish-1.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aaa05ba1650d84316f42a7347b2afd6959c804728f5c2b7c90496827e10f705f"}, + {file = "jellyfish-1.1.3-cp39-cp39-win32.whl", hash = "sha256:44fe82f01fe26bf7663d1169af9d28aa04d63e9c497da302aa22e2faef65a588"}, + {file = "jellyfish-1.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:66114f9f58e9b2827716a7e004ee9816de60fa00f6cfc906d187de03570f4726"}, + {file = "jellyfish-1.1.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd107e94bb1dfafabe6d03ba7a3e471acfa2786eb7d4fd3eaf402d7dc9bd755"}, + {file = "jellyfish-1.1.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:975cfec3ee2b9e1753aea336f7f65ab609e73a2df36e0f5f30c99b54670ebd9f"}, + {file = "jellyfish-1.1.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58623f0f1e65deca98b1ddd380d33d5f56202fa2d9f136bcc26951705bd61aac"}, + {file = "jellyfish-1.1.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0d724aa11b4625e7cd36958c20dd716723da9e8a11548f59f6e771f298dd79ff"}, + {file = "jellyfish-1.1.3-pp310-pypy310_pp73-musllinux_1_1_i686.whl", hash = 
"sha256:fe486886f61bc8539276efaf06687dcefd5a768674eba74e4254304ce1222360"}, + {file = "jellyfish-1.1.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:8ccb2ac6f0ab98f2f41b08757bc9f90110ccf2cc327a08d8859f2541f45d08d4"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36e865f0ddf5cda43fc49fc4a61fecb1c6aeb8699c9e52fd6e1f6321ed564b62"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7dc349fe799c02a4c9b0fe19204886afb83b563cd7157c7e9f44173607fd8984"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670d0f84bfde3469053eac53db999ca7b3a900fecf712919d7f9645d58d1becf"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:538a76ed655a7fdf6e6b8b3f64712193a563970a0e8b7c6488adfb8959bd434e"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46ad439689206953f2d490a66bf5145bd3b910836e8dd22c380f584b9ef910da"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c159b8360c2fce373b9fdcec6c3cb3fa29e1d01ef02f5476fae714ef34ace6ac"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-musllinux_1_1_i686.whl", hash = "sha256:bedf3414ab15f743450689d0c065e4898091a5763241de7b25a3f1ed0827c833"}, + {file = "jellyfish-1.1.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:60d51008f36285bc592373e639386296a3e7fbd2d634e1c6f972983cbbd243af"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75bef7edada9fe367bbaf05fc5989bcb95f28fb80cca45fe28dcddc3762cb770"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:368041ec16ed4fed0c1f30bfaca4bf8041ee688fd2f22ee0d7e6a3b37a2ef8bf"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0cf1d3c6eab17d59d01b9210fc7fa96da2543726a83053d76b9af6655b4da3e7"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c08ee4267db2f90779d0ca5e2f89fd1b49730e622446dc9479edaf218d1261c"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cce030c8a8b6db05ec3be662fd06a813fb3eeee4f568c63d6ed18e1d96667578"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cca438727233786f314fc0739aae153cbad592755920c96dcc1a965cf1047016"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-musllinux_1_1_i686.whl", hash = "sha256:a03c6891bc909d9cc2802d5677dfbc39a8c730b50591395fdf7e408612a1845b"}, + {file = "jellyfish-1.1.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:35cad0b7e914c2da4f1204f3f7e016784bdfc1952cd630e000e20641238f71e7"}, + {file = "jellyfish-1.1.3.tar.gz", hash = "sha256:650ba1ddabd716499f85fae0e1f3fa3e6532a69b68985d9294e86a1e04f08f9f"}, +] + +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" 
+referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] + +[[package]] +name = "jsonschema-specifications" +version = "2024.10.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "kiwisolver" +version = "1.4.8" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce2cf1e5688edcb727fdf7cd1bbd0b6416758996826a8be1d958f91880d0809d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8bf637892dc6e6aad2bc6d4d69d08764166e5e3f69d469e55427b6ac001b19d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:034d2c891f76bd3edbdb3ea11140d8510dca675443da7304205a2eaa45d8334c"}, + {file = 
"kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47b28d1dfe0793d5e96bce90835e17edf9a499b53969b03c6c47ea5985844c3"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb158fe28ca0c29f2260cca8c43005329ad58452c36f0edf298204de32a9a3ed"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5536185fce131780ebd809f8e623bf4030ce1b161353166c49a3c74c287897f"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:369b75d40abedc1da2c1f4de13f3482cb99e3237b38726710f4a793432b1c5ff"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:641f2ddf9358c80faa22e22eb4c9f54bd3f0e442e038728f500e3b978d00aa7d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d561d2d8883e0819445cfe58d7ddd673e4015c3c57261d7bdcd3710d0d14005c"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1732e065704b47c9afca7ffa272f845300a4eb959276bf6970dc07265e73b605"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcb1ebc3547619c3b58a39e2448af089ea2ef44b37988caf432447374941574e"}, + {file = "kiwisolver-1.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:89c107041f7b27844179ea9c85d6da275aa55ecf28413e87624d033cf1f6b751"}, + {file = "kiwisolver-1.4.8-cp310-cp310-win_arm64.whl", hash = "sha256:b5773efa2be9eb9fcf5415ea3ab70fc785d598729fd6057bea38d539ead28271"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a4d3601908c560bdf880f07d94f31d734afd1bb71e96585cace0e38ef44c6d84"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856b269c4d28a5c0d5e6c1955ec36ebfd1651ac00e1ce0afa3e28da95293b561"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2b9a96e0f326205af81a15718a9073328df1173a2619a68553decb7097fd5d7"}, + {file = 
"kiwisolver-1.4.8-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5020c83e8553f770cb3b5fc13faac40f17e0b205bd237aebd21d53d733adb03"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dace81d28c787956bfbfbbfd72fdcef014f37d9b48830829e488fdb32b49d954"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11e1022b524bd48ae56c9b4f9296bce77e15a2e42a502cceba602f804b32bb79"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b9b4d2892fefc886f30301cdd80debd8bb01ecdf165a449eb6e78f79f0fabd6"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a96c0e790ee875d65e340ab383700e2b4891677b7fcd30a699146f9384a2bb0"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23454ff084b07ac54ca8be535f4174170c1094a4cff78fbae4f73a4bcc0d4dab"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:87b287251ad6488e95b4f0b4a79a6d04d3ea35fde6340eb38fbd1ca9cd35bbbc"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b21dbe165081142b1232a240fc6383fd32cdd877ca6cc89eab93e5f5883e1c25"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:768cade2c2df13db52475bd28d3a3fac8c9eff04b0e9e2fda0f3760f20b3f7fc"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d47cfb2650f0e103d4bf68b0b5804c68da97272c84bb12850d877a95c056bd67"}, + {file = "kiwisolver-1.4.8-cp311-cp311-win_amd64.whl", hash = "sha256:ed33ca2002a779a2e20eeb06aea7721b6e47f2d4b8a8ece979d8ba9e2a167e34"}, + {file = "kiwisolver-1.4.8-cp311-cp311-win_arm64.whl", hash = "sha256:16523b40aab60426ffdebe33ac374457cf62863e330a90a0383639ce14bf44b2"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8"}, + {file = "kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50"}, + {file = "kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1c8ceb754339793c24aee1c9fb2485b5b1f5bb1c2c214ff13368431e51fc9a09"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a62808ac74b5e55a04a408cda6156f986cefbcf0ada13572696b507cc92fa1"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68269e60ee4929893aad82666821aaacbd455284124817af45c11e50a4b42e3c"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d142fba9c464bc3bbfeff15c96eab0e7310343d6aefb62a79d51421fcc5f1b"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc373e0eef45b59197de815b1b28ef89ae3955e7722cc9710fb91cd77b7f47"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77e6f57a20b9bd4e1e2cedda4d0b986ebd0216236f0106e55c28aea3d3d69b16"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08e77738ed7538f036cd1170cbed942ef749137b1311fa2bbe2a7fda2f6bf3cc"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5ce1e481a74b44dd5e92ff03ea0cb371ae7a0268318e202be06c8f04f4f1246"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fc2ace710ba7c1dfd1a3b42530b62b9ceed115f19a1656adefce7b1782a37794"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3452046c37c7692bd52b0e752b87954ef86ee2224e624ef7ce6cb21e8c41cc1b"}, 
+ {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7e9a60b50fe8b2ec6f448fe8d81b07e40141bfced7f896309df271a0b92f80f3"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:918139571133f366e8362fa4a297aeba86c7816b7ecf0bc79168080e2bd79957"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e063ef9f89885a1d68dd8b2e18f5ead48653176d10a0e324e3b0030e3a69adeb"}, + {file = "kiwisolver-1.4.8-cp313-cp313-win_amd64.whl", hash = "sha256:a17b7c4f5b2c51bb68ed379defd608a03954a1845dfed7cc0117f1cc8a9b7fd2"}, + {file = "kiwisolver-1.4.8-cp313-cp313-win_arm64.whl", hash = "sha256:3cd3bc628b25f74aedc6d374d5babf0166a92ff1317f46267f12d2ed54bc1d30"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:370fd2df41660ed4e26b8c9d6bbcad668fbe2560462cba151a721d49e5b6628c"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:84a2f830d42707de1d191b9490ac186bf7997a9495d4e9072210a1296345f7dc"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7a3ad337add5148cf51ce0b55642dc551c0b9d6248458a757f98796ca7348712"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7506488470f41169b86d8c9aeff587293f530a23a23a49d6bc64dab66bedc71e"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f0121b07b356a22fb0414cec4666bbe36fd6d0d759db3d37228f496ed67c880"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6d6bd87df62c27d4185de7c511c6248040afae67028a8a22012b010bc7ad062"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:291331973c64bb9cce50bbe871fb2e675c4331dab4f31abe89f175ad7679a4d7"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:893f5525bb92d3d735878ec00f781b2de998333659507d29ea4466208df37bed"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b47a465040146981dc9db8647981b8cb96366fbc8d452b031e4f8fdffec3f26d"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:99cea8b9dd34ff80c521aef46a1dddb0dcc0283cf18bde6d756f1e6f31772165"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:151dffc4865e5fe6dafce5480fab84f950d14566c480c08a53c663a0020504b6"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:577facaa411c10421314598b50413aa1ebcf5126f704f1e5d72d7e4e9f020d90"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:be4816dc51c8a471749d664161b434912eee82f2ea66bd7628bd14583a833e85"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e7a019419b7b510f0f7c9dceff8c5eae2392037eae483a7f9162625233802b0a"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:286b18e86682fd2217a48fc6be6b0f20c1d0ed10958d8dc53453ad58d7be0bf8"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4191ee8dfd0be1c3666ccbac178c5a05d5f8d689bbe3fc92f3c4abec817f8fe0"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd2785b9391f2873ad46088ed7599a6a71e762e1ea33e87514b1a441ed1da1c"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b29089b7ba090b6f1a669f1411f27221c3662b3a1b7010e67b59bb5a6f10b"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b"}, + {file = "kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e"}, +] + 
+[[package]] +name = "matplotlib" +version = "3.10.0" +description = "Python plotting package" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "matplotlib-3.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2c5829a5a1dd5a71f0e31e6e8bb449bc0ee9dbfb05ad28fc0c6b55101b3a4be6"}, + {file = "matplotlib-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2a43cbefe22d653ab34bb55d42384ed30f611bcbdea1f8d7f431011a2e1c62e"}, + {file = "matplotlib-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:607b16c8a73943df110f99ee2e940b8a1cbf9714b65307c040d422558397dac5"}, + {file = "matplotlib-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01d2b19f13aeec2e759414d3bfe19ddfb16b13a1250add08d46d5ff6f9be83c6"}, + {file = "matplotlib-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e6c6461e1fc63df30bf6f80f0b93f5b6784299f721bc28530477acd51bfc3d1"}, + {file = "matplotlib-3.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:994c07b9d9fe8d25951e3202a68c17900679274dadfc1248738dcfa1bd40d7f3"}, + {file = "matplotlib-3.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:fd44fc75522f58612ec4a33958a7e5552562b7705b42ef1b4f8c0818e304a363"}, + {file = "matplotlib-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c58a9622d5dbeb668f407f35f4e6bfac34bb9ecdcc81680c04d0258169747997"}, + {file = "matplotlib-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:845d96568ec873be63f25fa80e9e7fae4be854a66a7e2f0c8ccc99e94a8bd4ef"}, + {file = "matplotlib-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5439f4c5a3e2e8eab18e2f8c3ef929772fd5641876db71f08127eed95ab64683"}, + {file = "matplotlib-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4673ff67a36152c48ddeaf1135e74ce0d4bce1bbf836ae40ed39c29edf7e2765"}, + {file = "matplotlib-3.10.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:7e8632baebb058555ac0cde75db885c61f1212e47723d63921879806b40bec6a"}, + {file = "matplotlib-3.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4659665bc7c9b58f8c00317c3c2a299f7f258eeae5a5d56b4c64226fca2f7c59"}, + {file = "matplotlib-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d44cb942af1693cced2604c33a9abcef6205601c445f6d0dc531d813af8a2f5a"}, + {file = "matplotlib-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a994f29e968ca002b50982b27168addfd65f0105610b6be7fa515ca4b5307c95"}, + {file = "matplotlib-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b0558bae37f154fffda54d779a592bc97ca8b4701f1c710055b609a3bac44c8"}, + {file = "matplotlib-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:503feb23bd8c8acc75541548a1d709c059b7184cde26314896e10a9f14df5f12"}, + {file = "matplotlib-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c40ba2eb08b3f5de88152c2333c58cee7edcead0a2a0d60fcafa116b17117adc"}, + {file = "matplotlib-3.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96f2886f5c1e466f21cc41b70c5a0cd47bfa0015eb2d5793c88ebce658600e25"}, + {file = "matplotlib-3.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:12eaf48463b472c3c0f8dbacdbf906e573013df81a0ab82f0616ea4b11281908"}, + {file = "matplotlib-3.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fbbabc82fde51391c4da5006f965e36d86d95f6ee83fb594b279564a4c5d0d2"}, + {file = "matplotlib-3.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad2e15300530c1a94c63cfa546e3b7864bd18ea2901317bae8bbf06a5ade6dcf"}, + {file = "matplotlib-3.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3547d153d70233a8496859097ef0312212e2689cdf8d7ed764441c77604095ae"}, + {file = "matplotlib-3.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:c55b20591ced744aa04e8c3e4b7543ea4d650b6c3c4b208c08a05b4010e8b442"}, + {file = 
"matplotlib-3.10.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ade1003376731a971e398cc4ef38bb83ee8caf0aee46ac6daa4b0506db1fd06"}, + {file = "matplotlib-3.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95b710fea129c76d30be72c3b38f330269363fbc6e570a5dd43580487380b5ff"}, + {file = "matplotlib-3.10.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdbaf909887373c3e094b0318d7ff230b2ad9dcb64da7ade654182872ab2593"}, + {file = "matplotlib-3.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d907fddb39f923d011875452ff1eca29a9e7f21722b873e90db32e5d8ddff12e"}, + {file = "matplotlib-3.10.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3b427392354d10975c1d0f4ee18aa5844640b512d5311ef32efd4dd7db106ede"}, + {file = "matplotlib-3.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5fd41b0ec7ee45cd960a8e71aea7c946a28a0b8a4dcee47d2856b2af051f334c"}, + {file = "matplotlib-3.10.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:81713dd0d103b379de4516b861d964b1d789a144103277769238c732229d7f03"}, + {file = "matplotlib-3.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:359f87baedb1f836ce307f0e850d12bb5f1936f70d035561f90d41d305fdacea"}, + {file = "matplotlib-3.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae80dc3a4add4665cf2faa90138384a7ffe2a4e37c58d83e115b54287c4f06ef"}, + {file = "matplotlib-3.10.0.tar.gz", hash = "sha256:b886d02a581b96704c9d1ffe55709e49b4d2d52709ccebc4be42db856e511278"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.3.1" +numpy = ">=1.23" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[package.extras] +dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline 
Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "networkx" +version = "3.4.2" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, + {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, +] + +[package.extras] +default = ["matplotlib (>=3.7)", "numpy (>=1.24)", "pandas (>=2.0)", "scipy (>=1.10,!=1.11.0,!=1.11.1)"] +developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.15)", "sphinx (>=7.3)", "sphinx-gallery (>=0.16)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=1.9)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] + 
+[[package]] +name = "numpy" +version = "2.2.3" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "numpy-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cbc6472e01952d3d1b2772b720428f8b90e2deea8344e854df22b0618e9cce71"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdfe0c22692a30cd830c0755746473ae66c4a8f2e7bd508b35fb3b6a0813d787"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:e37242f5324ffd9f7ba5acf96d774f9276aa62a966c0bad8dae692deebec7716"}, + {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95172a21038c9b423e68be78fd0be6e1b97674cde269b76fe269a5dfa6fadf0b"}, + {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b47c440210c5d1d67e1cf434124e0b5c395eee1f5806fdd89b553ed1acd0a3"}, + {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0391ea3622f5c51a2e29708877d56e3d276827ac5447d7f45e9bc4ade8923c52"}, + {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f6b3dfc7661f8842babd8ea07e9897fe3d9b69a1d7e5fbb743e4160f9387833b"}, + {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ad78ce7f18ce4e7df1b2ea4019b5817a2f6a8a16e34ff2775f646adce0a5027"}, + {file = "numpy-2.2.3-cp310-cp310-win32.whl", hash = "sha256:5ebeb7ef54a7be11044c33a17b2624abe4307a75893c001a4800857956b41094"}, + {file = "numpy-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:596140185c7fa113563c67c2e894eabe0daea18cf8e33851738c19f70ce86aeb"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:16372619ee728ed67a2a606a614f56d3eabc5b86f8b615c79d01957062826ca8"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5521a06a3148686d9269c53b09f7d399a5725c47bbb5b35747e1cb76326b714b"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_arm64.whl", 
hash = "sha256:7c8dde0ca2f77828815fd1aedfdf52e59071a5bae30dac3b4da2a335c672149a"}, + {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:77974aba6c1bc26e3c205c2214f0d5b4305bdc719268b93e768ddb17e3fdd636"}, + {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d42f9c36d06440e34226e8bd65ff065ca0963aeecada587b937011efa02cdc9d"}, + {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2712c5179f40af9ddc8f6727f2bd910ea0eb50206daea75f58ddd9fa3f715bb"}, + {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c8b0451d2ec95010d1db8ca733afc41f659f425b7f608af569711097fd6014e2"}, + {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9b4a8148c57ecac25a16b0e11798cbe88edf5237b0df99973687dd866f05e1b"}, + {file = "numpy-2.2.3-cp311-cp311-win32.whl", hash = "sha256:1f45315b2dc58d8a3e7754fe4e38b6fce132dab284a92851e41b2b344f6441c5"}, + {file = "numpy-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f48ba6f6c13e5e49f3d3efb1b51c8193215c42ac82610a04624906a9270be6f"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12c045f43b1d2915eca6b880a7f4a256f59d62df4f044788c8ba67709412128d"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:87eed225fd415bbae787f93a457af7f5990b92a334e346f72070bf569b9c9c95"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:712a64103d97c404e87d4d7c47fb0c7ff9acccc625ca2002848e0d53288b90ea"}, + {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a5ae282abe60a2db0fd407072aff4599c279bcd6e9a2475500fc35b00a57c532"}, + {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5266de33d4c3420973cf9ae3b98b54a2a6d53a559310e3236c4b2b06b9c07d4e"}, + {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe"}, + {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:34c1b7e83f94f3b564b35f480f5652a47007dd91f7c839f404d03279cc8dd021"}, + {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d8335b5f1b6e2bce120d55fb17064b0262ff29b459e8493d1785c18ae2553b8"}, + {file = "numpy-2.2.3-cp312-cp312-win32.whl", hash = "sha256:4d9828d25fb246bedd31e04c9e75714a4087211ac348cb39c8c5f99dbb6683fe"}, + {file = "numpy-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bfdb06b395385ea9b91bf55c1adf1b297c9fdb531552845ff1d3ea6e40d5aba"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:23c9f4edbf4c065fddb10a4f6e8b6a244342d95966a48820c614891e5059bb50"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:a0c03b6be48aaf92525cccf393265e02773be8fd9551a2f9adbe7db1fa2b60f1"}, + {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:2376e317111daa0a6739e50f7ee2a6353f768489102308b0d98fcf4a04f7f3b5"}, + {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fb62fe3d206d72fe1cfe31c4a1106ad2b136fcc1606093aeab314f02930fdf2"}, + {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52659ad2534427dffcc36aac76bebdd02b67e3b7a619ac67543bc9bfe6b7cdb1"}, + {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b416af7d0ed3271cad0f0a0d0bee0911ed7eba23e66f8424d9f3dfcdcae1304"}, + {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1402da8e0f435991983d0a9708b779f95a8c98c6b18a171b9f1be09005e64d9d"}, + {file = "numpy-2.2.3-cp313-cp313-win32.whl", hash = "sha256:136553f123ee2951bfcfbc264acd34a2fc2f29d7cdf610ce7daf672b6fbaa693"}, + {file = 
"numpy-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5b732c8beef1d7bc2d9e476dbba20aaff6167bf205ad9aa8d30913859e82884b"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:435e7a933b9fda8126130b046975a968cc2d833b505475e588339e09f7672890"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7678556eeb0152cbd1522b684dcd215250885993dd00adb93679ec3c0e6e091c"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2e8da03bd561504d9b20e7a12340870dfc206c64ea59b4cfee9fceb95070ee94"}, + {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:c9aa4496fd0e17e3843399f533d62857cef5900facf93e735ef65aa4bbc90ef0"}, + {file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ca91d61a4bf61b0f2228f24bbfa6a9facd5f8af03759fe2a655c50ae2c6610"}, + {file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deaa09cd492e24fd9b15296844c0ad1b3c976da7907e1c1ed3a0ad21dded6f76"}, + {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:246535e2f7496b7ac85deffe932896a3577be7af8fb7eebe7146444680297e9a"}, + {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:daf43a3d1ea699402c5a850e5313680ac355b4adc9770cd5cfc2940e7861f1bf"}, + {file = "numpy-2.2.3-cp313-cp313t-win32.whl", hash = "sha256:cf802eef1f0134afb81fef94020351be4fe1d6681aadf9c5e862af6602af64ef"}, + {file = "numpy-2.2.3-cp313-cp313t-win_amd64.whl", hash = "sha256:aee2512827ceb6d7f517c8b85aa5d3923afe8fc7a57d028cffcd522f1c6fd082"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3c2ec8a0f51d60f1e9c0c5ab116b7fc104b165ada3f6c58abf881cb2eb16044d"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ed2cf9ed4e8ebc3b754d398cba12f24359f018b416c380f577bbae112ca52fc9"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:39261798d208c3095ae4f7bc8eaeb3481ea8c6e03dc48028057d3cbdbdb8937e"}, + {file = "numpy-2.2.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:783145835458e60fa97afac25d511d00a1eca94d4a8f3ace9fe2043003c678e4"}, + {file = "numpy-2.2.3.tar.gz", hash = "sha256:dbdc15f0c81611925f382dfa97b3bd0bc2c1ce19d4fe50482cb0ddc12ba30020"}, +] + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib 
(>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file 
= "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pillow" +version = "10.4.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + 
{file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = 
"pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = 
"pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = 
"pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions ; python_version < \"3.10\""] +xmp = ["defusedxml"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.50" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, + {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pycodestyle" +version = "2.12.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + 
{file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = 
"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = 
"sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = 
"sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = 
"sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyparsing" +version = "3.2.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, + {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "8.3.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", 
"setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.25.3" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-json-report" +version = "1.5.0" +description = "A pytest plugin to report test results as JSON files" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "pytest-json-report-1.5.0.tar.gz", hash = "sha256:2dde3c647851a19b5f3700729e8310a6e66efb2077d674f27ddea3d34dc615de"}, + {file = "pytest_json_report-1.5.0-py3-none-any.whl", hash = "sha256:9897b68c910b12a2e48dd849f9a284b2c79a732a8a9cb398452ddd23d3c8c325"}, +] + +[package.dependencies] +pytest = ">=3.8.0" +pytest-metadata = "*" + +[[package]] +name = "pytest-metadata" +version = "3.1.1" +description = "pytest plugin for test session metadata" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_metadata-3.1.1-py3-none-any.whl", hash = "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b"}, + {file = "pytest_metadata-3.1.1.tar.gz", hash = "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "tox (>=3.24.5)"] + +[[package]] +name = "pytest-timeout" +version = "2.3.1" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = 
"pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + 
+[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2025.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = 
"sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "referencing" +version = "0.36.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} + +[[package]] +name = "regex" +version = "2024.11.6" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = 
"regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = 
"regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + 
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rpds-py" +version = "0.22.3" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = 
"rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = 
"rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = 
"rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, +] + +[[package]] +name = "scikit-learn" +version = "1.6.1" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, + {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, + {file = "scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8634c4bd21a2a813e0a7e3900464e6d593162a29dd35d25bdf0103b3fce60ed5"}, + 
{file = "scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:775da975a471c4f6f467725dff0ced5c7ac7bda5e9316b260225b48475279a1b"}, + {file = "scikit_learn-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:8a600c31592bd7dab31e1c61b9bbd6dea1b3433e67d264d17ce1017dbdce8002"}, + {file = "scikit_learn-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72abc587c75234935e97d09aa4913a82f7b03ee0b74111dcc2881cba3c5a7b33"}, + {file = "scikit_learn-1.6.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b3b00cdc8f1317b5f33191df1386c0befd16625f49d979fe77a8d44cae82410d"}, + {file = "scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc4765af3386811c3ca21638f63b9cf5ecf66261cc4815c1db3f1e7dc7b79db2"}, + {file = "scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25fc636bdaf1cc2f4a124a116312d837148b5e10872147bdaf4887926b8c03d8"}, + {file = "scikit_learn-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fa909b1a36e000a03c382aade0bd2063fd5680ff8b8e501660c0f59f021a6415"}, + {file = "scikit_learn-1.6.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:926f207c804104677af4857b2c609940b743d04c4c35ce0ddc8ff4f053cddc1b"}, + {file = "scikit_learn-1.6.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c2cae262064e6a9b77eee1c8e768fc46aa0b8338c6a8297b9b6759720ec0ff2"}, + {file = "scikit_learn-1.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1061b7c028a8663fb9a1a1baf9317b64a257fcb036dae5c8752b2abef31d136f"}, + {file = "scikit_learn-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e69fab4ebfc9c9b580a7a80111b43d214ab06250f8a7ef590a4edf72464dd86"}, + {file = "scikit_learn-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:70b1d7e85b1c96383f872a519b3375f92f14731e279a7b4c6cfd650cf5dffc52"}, + {file = "scikit_learn-1.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:2ffa1e9e25b3d93990e74a4be2c2fc61ee5af85811562f1288d5d055880c4322"}, + {file = "scikit_learn-1.6.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:dc5cf3d68c5a20ad6d571584c0750ec641cc46aeef1c1507be51300e6003a7e1"}, + {file = "scikit_learn-1.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c06beb2e839ecc641366000ca84f3cf6fa9faa1777e29cf0c04be6e4d096a348"}, + {file = "scikit_learn-1.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8ca8cb270fee8f1f76fa9bfd5c3507d60c6438bbee5687f81042e2bb98e5a97"}, + {file = "scikit_learn-1.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:7a1c43c8ec9fde528d664d947dc4c0789be4077a3647f232869f41d9bf50e0fb"}, + {file = "scikit_learn-1.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a17c1dea1d56dcda2fac315712f3651a1fea86565b64b48fa1bc090249cbf236"}, + {file = "scikit_learn-1.6.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a7aa5f9908f0f28f4edaa6963c0a6183f1911e63a69aa03782f0d924c830a35"}, + {file = "scikit_learn-1.6.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0650e730afb87402baa88afbf31c07b84c98272622aaba002559b614600ca691"}, + {file = "scikit_learn-1.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:3f59fe08dc03ea158605170eb52b22a105f238a5d512c4470ddeca71feae8e5f"}, + {file = "scikit_learn-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6849dd3234e87f55dce1db34c89a810b489ead832aaf4d4550b7ea85628be6c1"}, + {file = "scikit_learn-1.6.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e7be3fa5d2eb9be7d77c3734ff1d599151bb523674be9b834e8da6abe132f44e"}, + {file = "scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44a17798172df1d3c1065e8fcf9019183f06c87609b49a124ebdf57ae6cb0107"}, + {file = "scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b7a3b86e411e4bce21186e1c180d792f3d99223dcfa3b4f597ecc92fa1a422"}, + {file = 
"scikit_learn-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7a73d457070e3318e32bdb3aa79a8d990474f19035464dfd8bede2883ab5dc3b"}, + {file = "scikit_learn-1.6.1.tar.gz", hash = "sha256:b4fc2525eca2c69a59260f583c56a7557c6ccdf8deafdba6e060f94c1c59738e"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.17.1)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)", "towncrier (>=24.8.0)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==2.5.6)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.5.1)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.15.2" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = 
"sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9"}, + {file = "scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5"}, + {file = "scipy-1.15.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:ecf797d2d798cf7c838c6d98321061eb3e72a74710e6c40540f0e8087e3b499e"}, + {file = "scipy-1.15.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:9b18aa747da280664642997e65aab1dd19d0c3d17068a04b3fe34e2559196cb9"}, + {file = "scipy-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87994da02e73549dfecaed9e09a4f9d58a045a053865679aeb8d6d43747d4df3"}, + {file = "scipy-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69ea6e56d00977f355c0f84eba69877b6df084516c602d93a33812aa04d90a3d"}, + {file = "scipy-1.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:888307125ea0c4466287191e5606a2c910963405ce9671448ff9c81c53f85f58"}, + {file = "scipy-1.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9412f5e408b397ff5641080ed1e798623dbe1ec0d78e72c9eca8992976fa65aa"}, + {file = "scipy-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:b5e025e903b4f166ea03b109bb241355b9c42c279ea694d8864d033727205e65"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:92233b2df6938147be6fa8824b8136f29a18f016ecde986666be5f4d686a91a4"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:62ca1ff3eb513e09ed17a5736929429189adf16d2d740f44e53270cc800ecff1"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:4c6676490ad76d1c2894d77f976144b41bd1a4052107902238047fb6a473e971"}, + {file = "scipy-1.15.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8bf5cb4a25046ac61d38f8d3c3426ec11ebc350246a4642f2f315fe95bda655"}, + {file = "scipy-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6a8e34cf4c188b6dd004654f88586d78f95639e48a25dfae9c5e34a6dc34547e"}, + {file = "scipy-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a0d2c2075946346e4408b211240764759e0fabaeb08d871639b5f3b1aca8a0"}, + {file = "scipy-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:42dabaaa798e987c425ed76062794e93a243be8f0f20fff6e7a89f4d61cb3d40"}, + {file = "scipy-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6f5e296ec63c5da6ba6fa0343ea73fd51b8b3e1a300b0a8cae3ed4b1122c7462"}, + {file = "scipy-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:597a0c7008b21c035831c39927406c6181bcf8f60a73f36219b69d010aa04737"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c4697a10da8f8765bb7c83e24a470da5797e37041edfd77fd95ba3811a47c4fd"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:869269b767d5ee7ea6991ed7e22b3ca1f22de73ab9a49c44bad338b725603301"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bad78d580270a4d32470563ea86c6590b465cb98f83d760ff5b0990cb5518a93"}, + {file = "scipy-1.15.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b09ae80010f52efddb15551025f9016c910296cf70adbf03ce2a8704f3a5ad20"}, + {file = "scipy-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6fd6eac1ce74a9f77a7fc724080d507c5812d61e72bd5e4c489b042455865e"}, + {file = "scipy-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b871df1fe1a3ba85d90e22742b93584f8d2b8e6124f8372ab15c71b73e428b8"}, + {file = "scipy-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:03205d57a28e18dfd39f0377d5002725bf1f19a46f444108c29bdb246b6c8a11"}, + {file = "scipy-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:601881dfb761311045b03114c5fe718a12634e5608c3b403737ae463c9885d53"}, + {file = "scipy-1.15.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:e7c68b6a43259ba0aab737237876e5c2c549a031ddb7abc28c7b47f22e202ded"}, + {file = "scipy-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01edfac9f0798ad6b46d9c4c9ca0e0ad23dbf0b1eb70e96adb9fa7f525eff0bf"}, + {file = "scipy-1.15.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:08b57a9336b8e79b305a143c3655cc5bdbe6d5ece3378578888d2afbb51c4e37"}, + {file = "scipy-1.15.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:54c462098484e7466362a9f1672d20888f724911a74c22ae35b61f9c5919183d"}, + {file = "scipy-1.15.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:cf72ff559a53a6a6d77bd8eefd12a17995ffa44ad86c77a5df96f533d4e6c6bb"}, + {file = "scipy-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de9d1416b3d9e7df9923ab23cd2fe714244af10b763975bea9e4f2e81cebd27"}, + {file = "scipy-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb530e4794fc8ea76a4a21ccb67dea33e5e0e60f07fc38a49e821e1eae3b71a0"}, + {file = "scipy-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5ea7ed46d437fc52350b028b1d44e002646e28f3e8ddc714011aaf87330f2f32"}, + {file = "scipy-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11e7ad32cf184b74380f43d3c0a706f49358b904fa7d5345f16ddf993609184d"}, + {file = "scipy-1.15.2-cp313-cp313-win_amd64.whl", hash = "sha256:a5080a79dfb9b78b768cebf3c9dcbc7b665c5875793569f48bf0e2b1d7f68f6f"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:447ce30cee6a9d5d1379087c9e474628dab3db4a67484be1b7dc3196bfb2fac9"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c90ebe8aaa4397eaefa8455a8182b164a6cc1d59ad53f79943f266d99f68687f"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:def751dd08243934c884a3221156d63e15234a3155cf25978b0a668409d45eb6"}, + {file = "scipy-1.15.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:302093e7dfb120e55515936cb55618ee0b895f8bcaf18ff81eca086c17bd80af"}, + 
{file = "scipy-1.15.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd5b77413e1855351cdde594eca99c1f4a588c2d63711388b6a1f1c01f62274"}, + {file = "scipy-1.15.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d0194c37037707b2afa7a2f2a924cf7bac3dc292d51b6a925e5fcb89bc5c776"}, + {file = "scipy-1.15.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:bae43364d600fdc3ac327db99659dcb79e6e7ecd279a75fe1266669d9a652828"}, + {file = "scipy-1.15.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f031846580d9acccd0044efd1a90e6f4df3a6e12b4b6bd694a7bc03a89892b28"}, + {file = "scipy-1.15.2-cp313-cp313t-win_amd64.whl", hash = "sha256:fe8a9eb875d430d81755472c5ba75e84acc980e4a8f6204d402849234d3017db"}, + {file = "scipy-1.15.2.tar.gz", hash = "sha256:cd58a314d92838f7e6f755c8a2167ead4f27e1fd5c1251fd54289569ef3495ec"}, +] + +[package.dependencies] +numpy = ">=1.23.5,<2.5" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "segtok" +version = "1.5.11" +description = "sentence segmentation and word tokenization tools" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "segtok-1.5.11-py3-none-any.whl", hash = "sha256:910616b76198c3141b2772df530270d3b706e42ae69a5b30ef115c7bd5d1501a"}, + {file = 
"segtok-1.5.11.tar.gz", hash = "sha256:8ab2dd44245bcbfec25b575dc4618473bbdf2af8c2649698cd5a370f42f3db23"}, +] + +[package.dependencies] +regex = "*" + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "swarmauri-base" +version = "0.6.1.dev13" +description = "This repository includes base classes and mixins for the Swarmauri framework." +optional = false +python-versions = ">=3.10,<3.13" +groups = ["main"] +files = [] +develop = false + +[package.dependencies] +pydantic = "^2.0" +swarmauri_core = {git = "https://github.com/swarmauri/swarmauri-sdk.git", branch = "mono/dev", subdirectory = "pkgs/core"} + +[package.source] +type = "git" +url = "https://github.com/swarmauri/swarmauri-sdk.git" +reference = "mono/dev" +resolved_reference = "8f42a607beadf12a7463e543fe36ba5600f5ecb8" +subdirectory = "pkgs/base" + +[[package]] +name = "swarmauri-core" +version = "0.6.1.dev13" +description = "This repository includes core interfaces for the Swarmauri framework." 
+optional = false +python-versions = ">=3.10,<3.13" +groups = ["main"] +files = [] +develop = false + +[package.dependencies] +pydantic = "^2.0" +pyyaml = "^6.0.2" + +[package.source] +type = "git" +url = "https://github.com/swarmauri/swarmauri-sdk.git" +reference = "mono/dev" +resolved_reference = "8f42a607beadf12a7463e543fe36ba5600f5ecb8" +subdirectory = "pkgs/core" + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "threadpoolctl" +version = "3.5.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", 
hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2025.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = 
">=2" +groups = ["main"] +files = [ + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "yake" +version = "0.4.8" +description = "Keyword extraction Python package" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "yake-0.4.8-py2.py3-none-any.whl", hash = "sha256:d46793266826468b4aecb668c51e677b7bc304f1bd3a15e100e324852ec5a0c3"}, + {file = "yake-0.4.8.tar.gz", hash = "sha256:859f379ac49ca204a0bc1527217f937321e87b68287f81db9700fc1039fd529a"}, +] + +[package.dependencies] +click = ">=6.0" +jellyfish = "*" +networkx = "*" +numpy = "*" +segtok = "*" 
+tabulate = "*" + +[metadata] +lock-version = "2.1" +python-versions = ">=3.10,<3.13" +content-hash = "6c1d6a16a04ff128d154f214ea18e11a9eeade75051e300c08c6ab465f49b64d" diff --git a/pkgs/standards/swarmauri_standard/pyproject.toml b/pkgs/swarmauri_standard/pyproject.toml similarity index 82% rename from pkgs/standards/swarmauri_standard/pyproject.toml rename to pkgs/swarmauri_standard/pyproject.toml index 3f2c4a63c..a371cc505 100644 --- a/pkgs/standards/swarmauri_standard/pyproject.toml +++ b/pkgs/swarmauri_standard/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "swarmauri-standard" -version = "0.6.1.dev9" +version = "0.6.1" description = "This repository includes standard components within the Swarmauri framework." authors = ["Jacob Stewart "] license = "Apache-2.0" @@ -23,20 +23,33 @@ swarmauri_base = { git = "https://github.com/swarmauri/swarmauri-sdk.git", branc # Dependencies toml = "^0.10.2" httpx = "^0.27.0" -joblib = "^1.4.0" numpy = "*" pandas = "*" pydantic = "^2.9.2" typing_extensions = "*" +##################### +# Technical Debt +# +##################### +# Should only appear on a standalone package +#scikit-learn = "^1.6.1" +#matplotlib = "^3.10.0" +Pillow = ">=8.0,<11.0" +#beautifulsoup4 = "^4.13.3" +#scipy = ">=1.7.0,<1.14.0" +#yake = "^0.4.8" + +# Are we dependent upon this forever? +aiofiles = { version = "24.1.0"} + # We should remove and only rely on httpx requests = "^2.32.3" -# This should be set to optional also -Pillow = ">=8.0,<11.0" +# what requires joblib? 
+joblib = "^1.4.0" +########################### -# Optional dependencies with versions specified -aiofiles = { version = "24.1.0"} [tool.poetry.group.dev.dependencies] @@ -63,6 +76,8 @@ markers = [ "test: standard test", "unit: Unit tests", "integration: Integration tests", + "i9n: Integration tests", + "r8n: Regression tests", "acceptance: Acceptance tests", "experimental: Experimental tests", "timeout: mark test to timeout after X seconds", diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/README.md b/pkgs/swarmauri_standard/swarmauri_standard/README.md similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/README.md rename to pkgs/swarmauri_standard/swarmauri_standard/README.md diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/agents/QAAgent.py b/pkgs/swarmauri_standard/swarmauri_standard/agents/QAAgent.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/agents/QAAgent.py rename to pkgs/swarmauri_standard/swarmauri_standard/agents/QAAgent.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/agents/RagAgent.py b/pkgs/swarmauri_standard/swarmauri_standard/agents/RagAgent.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/agents/RagAgent.py rename to pkgs/swarmauri_standard/swarmauri_standard/agents/RagAgent.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/agents/SimpleConversationAgent.py b/pkgs/swarmauri_standard/swarmauri_standard/agents/SimpleConversationAgent.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/agents/SimpleConversationAgent.py rename to 
pkgs/swarmauri_standard/swarmauri_standard/agents/SimpleConversationAgent.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/agents/ToolAgent.py b/pkgs/swarmauri_standard/swarmauri_standard/agents/ToolAgent.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/agents/ToolAgent.py rename to pkgs/swarmauri_standard/swarmauri_standard/agents/ToolAgent.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/agents/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/agents/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/agents/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/agents/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chains/CallableChain.py b/pkgs/swarmauri_standard/swarmauri_standard/chains/CallableChain.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chains/CallableChain.py rename to pkgs/swarmauri_standard/swarmauri_standard/chains/CallableChain.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chains/ChainStep.py b/pkgs/swarmauri_standard/swarmauri_standard/chains/ChainStep.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chains/ChainStep.py rename to pkgs/swarmauri_standard/swarmauri_standard/chains/ChainStep.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chains/ContextChain.py b/pkgs/swarmauri_standard/swarmauri_standard/chains/ContextChain.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chains/ContextChain.py rename to pkgs/swarmauri_standard/swarmauri_standard/chains/ContextChain.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chains/PromptContextChain.py b/pkgs/swarmauri_standard/swarmauri_standard/chains/PromptContextChain.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/chains/PromptContextChain.py rename to pkgs/swarmauri_standard/swarmauri_standard/chains/PromptContextChain.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chains/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/chains/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chains/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/chains/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/DelimiterBasedChunker.py b/pkgs/swarmauri_standard/swarmauri_standard/chunkers/DelimiterBasedChunker.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/DelimiterBasedChunker.py rename to pkgs/swarmauri_standard/swarmauri_standard/chunkers/DelimiterBasedChunker.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/FixedLengthChunker.py b/pkgs/swarmauri_standard/swarmauri_standard/chunkers/FixedLengthChunker.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/FixedLengthChunker.py rename to pkgs/swarmauri_standard/swarmauri_standard/chunkers/FixedLengthChunker.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/MdSnippetChunker.py b/pkgs/swarmauri_standard/swarmauri_standard/chunkers/MdSnippetChunker.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/MdSnippetChunker.py rename to pkgs/swarmauri_standard/swarmauri_standard/chunkers/MdSnippetChunker.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/SentenceChunker.py b/pkgs/swarmauri_standard/swarmauri_standard/chunkers/SentenceChunker.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/SentenceChunker.py rename to pkgs/swarmauri_standard/swarmauri_standard/chunkers/SentenceChunker.py diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/SlidingWindowChunker.py b/pkgs/swarmauri_standard/swarmauri_standard/chunkers/SlidingWindowChunker.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/SlidingWindowChunker.py rename to pkgs/swarmauri_standard/swarmauri_standard/chunkers/SlidingWindowChunker.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/chunkers/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/chunkers/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/chunkers/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/control_panels/ControlPanel.py b/pkgs/swarmauri_standard/swarmauri_standard/control_panels/ControlPanel.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/control_panels/ControlPanel.py rename to pkgs/swarmauri_standard/swarmauri_standard/control_panels/ControlPanel.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/control_panels/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/control_panels/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/control_panels/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/control_panels/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/Conversation.py b/pkgs/swarmauri_standard/swarmauri_standard/conversations/Conversation.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/Conversation.py rename to pkgs/swarmauri_standard/swarmauri_standard/conversations/Conversation.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/MaxSizeConversation.py b/pkgs/swarmauri_standard/swarmauri_standard/conversations/MaxSizeConversation.py 
similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/MaxSizeConversation.py rename to pkgs/swarmauri_standard/swarmauri_standard/conversations/MaxSizeConversation.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/MaxSystemContextConversation.py b/pkgs/swarmauri_standard/swarmauri_standard/conversations/MaxSystemContextConversation.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/MaxSystemContextConversation.py rename to pkgs/swarmauri_standard/swarmauri_standard/conversations/MaxSystemContextConversation.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/SessionCacheConversation.py b/pkgs/swarmauri_standard/swarmauri_standard/conversations/SessionCacheConversation.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/SessionCacheConversation.py rename to pkgs/swarmauri_standard/swarmauri_standard/conversations/SessionCacheConversation.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/conversations/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/conversations/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/conversations/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/dataconnectors/GoogleDriveDataConnector.py b/pkgs/swarmauri_standard/swarmauri_standard/dataconnectors/GoogleDriveDataConnector.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/dataconnectors/GoogleDriveDataConnector.py rename to pkgs/swarmauri_standard/swarmauri_standard/dataconnectors/GoogleDriveDataConnector.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/dataconnectors/__init__.py 
b/pkgs/swarmauri_standard/swarmauri_standard/dataconnectors/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/dataconnectors/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/dataconnectors/__init__.py diff --git a/pkgs/swarmauri_standard/swarmauri_standard/decorators/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/decorators/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/swarmauri_standard/swarmauri_standard/decorators/deprecate.py b/pkgs/swarmauri_standard/swarmauri_standard/decorators/deprecate.py new file mode 100644 index 000000000..4abe87687 --- /dev/null +++ b/pkgs/swarmauri_standard/swarmauri_standard/decorators/deprecate.py @@ -0,0 +1,134 @@ +import warnings +import functools + + +def _construct_deprecation_message( + item_type: str, item_name: str, since: str, removed_in: str, alternative: str = None +) -> str: + """ + Helper function to construct a deprecation message. + + :param item_type: A string indicating what is being deprecated (e.g. "class", "method", "import path"). + :param item_name: The name of the item (e.g. "OldClass", "my_old_module"). + :param since: Version when deprecation started. + :param removed_in: Version when the deprecated item will be removed. + :param alternative: Suggested replacement, or None if no replacement exists. + :return: A formatted warning message. + """ + base_msg = ( + f"The {item_type} '{item_name}' is deprecated as of version {since} " + f"and will be removed in version {removed_in}. " + ) + if alternative: + base_msg += f"Use '{alternative}' instead." + else: + base_msg += "No replacement is available." + return base_msg + + +def deprecated_import_path( + item_name: str, since: str, removed_in: str, alternative: str = None +): + """ + A decorator to trigger a DeprecationWarning at import time for a deprecated import path. + + How It Works: + ------------- + 1. 
You apply this decorator to a dummy class (or function) at the top-level of the + deprecated module. + 2. When the module is imported, Python reads the decorated class, triggering + this decorator's code immediately, thus issuing the warning. + + Example Usage in old_module.py: + ------------------------------- + from my_deprecations import deprecate_import_path + + @deprecate_import_path( + item_name='old_module', + since='1.0.0', + removed_in='2.0.0', + alternative='new_module' + ) + class _ImportDeprecationTrigger: + # This class is never actually used; it just triggers the warning at import time. + pass + + def some_old_function(): + return "Doing old stuff..." + + Then, any code `import old_module` will emit a DeprecationWarning (provided + DeprecationWarnings are not filtered out by default). + """ + # Construct the warning message once, outside the inner decorator + warning_msg = _construct_deprecation_message( + item_type="import path", + item_name=item_name, + since=since, + removed_in=removed_in, + alternative=alternative, + ) + + def decorator(obj): + # Trigger the warning at import time (when Python first sees the decorated object). + warnings.warn( + warning_msg, + category=DeprecationWarning, + stacklevel=2, # Ensures warning points to the user's import line (or near it). + ) + return obj # Return the object unmodified (usually a dummy class). + + return decorator + + +def deprecated_class(since: str, removed_in: str, alternative: str = None): + """ + Class decorator that marks the class as deprecated. A DeprecationWarning + is raised whenever an instance of the class is created. 
+ """ + + def decorator(cls): + original_init = cls.__init__ + item_name = cls.__name__ + + @functools.wraps(original_init) + def new_init(self, *args, **kwargs): + message = _construct_deprecation_message( + item_type="class", + item_name=item_name, + since=since, + removed_in=removed_in, + alternative=alternative, + ) + warnings.warn(message, category=DeprecationWarning, stacklevel=2) + return original_init(self, *args, **kwargs) + + cls.__init__ = new_init + return cls + + return decorator + + +def deprecated_method(since: str, removed_in: str, alternative: str = None): + """ + Method (or function) decorator that raises a DeprecationWarning whenever + the decorated method is called. + """ + + def decorator(func): + item_name = func.__name__ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + message = _construct_deprecation_message( + item_type="method", + item_name=item_name, + since=since, + removed_in=removed_in, + alternative=alternative, + ) + warnings.warn(message, category=DeprecationWarning, stacklevel=2) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/pkgs/swarmauri_standard/swarmauri_standard/decorators/maybe_async.py b/pkgs/swarmauri_standard/swarmauri_standard/decorators/maybe_async.py new file mode 100644 index 000000000..88d16faf7 --- /dev/null +++ b/pkgs/swarmauri_standard/swarmauri_standard/decorators/maybe_async.py @@ -0,0 +1,20 @@ +import asyncio +from functools import wraps + + +def maybe_async(func): + """ + If the decorated function is called within an event loop, run async. + Otherwise, run sync using asyncio.run. + """ + + @wraps(func) + def wrapper(*args, **kwargs): + try: + # Check for a running loop without assignment. 
def tool(func):
    """
    Decorator that creates a dynamic ToolBase subclass from the decorated function.

    The generated tool will:
      - Use the function name as the ``name`` and ``type`` of the tool.
      - Derive parameters from the function's signature and type hints.
      - Use the function's docstring as the tool's description.

    Args:
        func: The plain callable to expose as a tool.

    Returns:
        An *instance* of the dynamically generated ToolBase subclass that
        invokes ``func`` when called.
    """
    # Capture function name and docstring for the generated tool class.
    func_name = func.__name__
    docstring = func.__doc__ or ""

    # Inspect the function signature for parameter names, defaults, etc.
    signature = inspect.signature(func)
    type_hints = get_type_hints(func)

    # Build the list of Parameter objects from the function signature.
    parameters_list: List[Parameter] = []
    for param_name, param in signature.parameters.items():
        # If the parameter has a type annotation, grab it; otherwise default
        # to `str`.
        annotated_type = type_hints.get(param_name, str)

        # A parameter is required exactly when it has no default value.
        # NOTE: `inspect.Parameter.empty` is a sentinel object — compare
        # with `is`, not `==`.
        required = param.default is inspect.Parameter.empty

        # typing generics (e.g. List[int], Optional[str]) have no `__name__`
        # attribute; fall back to their string representation instead of
        # raising AttributeError.
        type_name = getattr(annotated_type, "__name__", str(annotated_type))

        parameters_list.append(
            Parameter(
                name=param_name,
                type=type_name,
                description=f"Parameter for {param_name}",
                required=required,
            )
        )

    # Dynamically create the subclass of ToolBase, registered under the
    # function's name so each decorated function yields a distinct tool type.
    @ComponentBase.register_type(ToolBase, func_name)
    class FunctionTool(ToolBase):
        version: str = "1.0.0"
        parameters: List[Parameter] = Field(default_factory=lambda: parameters_list)
        name: str = func_name
        description: str = docstring
        # The tool type is set to be the same as the function name.
        type: str = func_name

        def __call__(self, *args, **kwargs) -> Any:
            """Invoke the underlying function with the provided arguments."""
            return func(*args, **kwargs)

    # Return an *instance* of this generated class so the decorated name is
    # immediately usable as a tool.
    return FunctionTool()
diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/HaversineDistance.py b/pkgs/swarmauri_standard/swarmauri_standard/distances/HaversineDistance.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/distances/HaversineDistance.py rename to pkgs/swarmauri_standard/swarmauri_standard/distances/HaversineDistance.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/JaccardIndexDistance.py b/pkgs/swarmauri_standard/swarmauri_standard/distances/JaccardIndexDistance.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/distances/JaccardIndexDistance.py rename to pkgs/swarmauri_standard/swarmauri_standard/distances/JaccardIndexDistance.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/LevenshteinDistance.py b/pkgs/swarmauri_standard/swarmauri_standard/distances/LevenshteinDistance.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/distances/LevenshteinDistance.py rename to pkgs/swarmauri_standard/swarmauri_standard/distances/LevenshteinDistance.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/ManhattanDistance.py b/pkgs/swarmauri_standard/swarmauri_standard/distances/ManhattanDistance.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/distances/ManhattanDistance.py rename to pkgs/swarmauri_standard/swarmauri_standard/distances/ManhattanDistance.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/SorensenDiceDistance.py b/pkgs/swarmauri_standard/swarmauri_standard/distances/SorensenDiceDistance.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/distances/SorensenDiceDistance.py rename to pkgs/swarmauri_standard/swarmauri_standard/distances/SorensenDiceDistance.py diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/SquaredEuclideanDistance.py b/pkgs/swarmauri_standard/swarmauri_standard/distances/SquaredEuclideanDistance.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/distances/SquaredEuclideanDistance.py rename to pkgs/swarmauri_standard/swarmauri_standard/distances/SquaredEuclideanDistance.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/distances/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/distances/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/distances/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/distances/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/documents/Document.py b/pkgs/swarmauri_standard/swarmauri_standard/documents/Document.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/documents/Document.py rename to pkgs/swarmauri_standard/swarmauri_standard/documents/Document.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/documents/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/documents/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/documents/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/documents/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/CohereEmbedding.py b/pkgs/swarmauri_standard/swarmauri_standard/embeddings/CohereEmbedding.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/CohereEmbedding.py rename to pkgs/swarmauri_standard/swarmauri_standard/embeddings/CohereEmbedding.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/embeddings/GeminiEmbedding.py b/pkgs/swarmauri_standard/swarmauri_standard/embeddings/GeminiEmbedding.py similarity index 100% rename from 
@ComponentBase.register_type(EmbeddingBase, "TfidfEmbedding")
class TfidfEmbedding(EmbeddingBase):
    """
    Pure-Python TF-IDF embedding.

    Tokenization is a simple lowercase whitespace split. ``fit`` builds a
    sorted vocabulary, per-term idf values (``log(N / df)``), and the TF-IDF
    vectors of the fitted corpus; ``transform`` projects new documents onto
    the fitted vocabulary, ignoring unseen terms.
    """

    # Private attributes to store our custom model data.
    _fit_matrix = PrivateAttr()  # TF-IDF vectors computed by the last fit.
    _features = PrivateAttr()  # Sorted list of vocabulary terms.
    _idf = PrivateAttr()  # Dict mapping term -> idf value.

    type: Literal["TfidfEmbedding"] = "TfidfEmbedding"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Initialize our internal attributes.
        self._features = []  # This will hold our vocabulary.
        self._idf = {}  # This will hold the computed idf for each token.
        self._fit_matrix = []  # This will hold the TF-IDF vectors.

    def extract_features(self) -> List[str]:
        """
        Returns the list of features (vocabulary terms) that were extracted
        during fitting.
        """
        return self._features

    def fit(self, documents: List[str]) -> None:
        """
        Fits the TF-IDF model on the provided documents.

        Computes the vocabulary, document frequencies, idf values, and the
        TF-IDF vectors for each document.

        Args:
            documents: Corpus to fit on.
        """
        N = len(documents)
        df = defaultdict(int)
        tokenized_docs = []

        # Tokenize documents and compute document frequency for each token.
        for doc in documents:
            # Simple tokenization: lowercasing and splitting on whitespace.
            tokens = doc.lower().split()
            tokenized_docs.append(tokens)
            for token in set(tokens):  # set() counts each token once per doc
                df[token] += 1

        # Build a sorted vocabulary for consistent vector ordering.
        self._features = sorted(df)

        # Compute idf for each term using the formula: log(N / df).
        self._idf = {token: math.log(N / df[token]) for token in self._features}

        # Now compute the TF-IDF vector for each document.
        self._fit_matrix = []
        for tokens in tokenized_docs:
            tf = Counter(tokens)
            doc_len = len(tokens)
            vector = []
            for token in self._features:
                # Term frequency (TF) for the token in this document; guard
                # against division by zero for empty documents.
                tf_value = tf[token] / doc_len if doc_len > 0 else 0.0
                # Multiply by idf to get the TF-IDF weight.
                vector.append(tf_value * self._idf[token])
            self._fit_matrix.append(vector)

    def fit_transform(self, documents: List[str]) -> List[Vector]:
        """
        Fits the model on the provided documents and returns the TF-IDF
        vectors as a list of Vector instances.
        """
        self.fit(documents)
        return [Vector(value=vec) for vec in self._fit_matrix]

    def transform(self, documents: List[str]) -> List[Vector]:
        """
        Transforms new documents into TF-IDF vectors using the vocabulary and
        idf values computed during fitting. Any term not in the vocabulary is
        ignored.

        Raises:
            ValueError: If the model has not been fitted yet.
        """
        if not self._features or not self._idf:
            raise ValueError(
                "The model has not been fitted yet. Please call fit first."
            )

        transformed_vectors = []
        for doc in documents:
            tokens = doc.lower().split()
            tf = Counter(tokens)
            doc_len = len(tokens)
            vector = []
            for token in self._features:
                tf_value = tf[token] / doc_len if doc_len > 0 else 0.0
                # If the token is not in the fitted vocabulary, its idf
                # defaults to 0 so the term contributes nothing.
                idf_value = self._idf.get(token, 0.0)
                vector.append(tf_value * idf_value)
            transformed_vectors.append(Vector(value=vector))
        return transformed_vectors

    def infer_vector(self, data: str, documents: List[str]) -> Vector:
        """
        Infers a TF-IDF vector for a new document by re-fitting the model on
        the provided corpus extended with the new document, and returning the
        vector for the new document.

        Note:
            Re-fitting per inference is inefficient for production use.
            The caller's ``documents`` list is NOT modified; the extended
            corpus is built as a new list.
        """
        # Build a new list instead of appending to the caller's list —
        # mutating an input argument is a surprising side effect.
        vectors = self.fit_transform(documents + [data])
        return vectors[-1]

    def save_model(self, path: str) -> None:
        """
        Saves the TF-IDF model (i.e. the vocabulary and idf values) to the
        specified path using joblib.
        """
        model_data = {
            "features": self._features,
            "idf": self._idf,
        }
        joblib.dump(model_data, path)

    def load_model(self, path: str) -> None:
        """
        Loads a TF-IDF model (i.e. the vocabulary and idf values) from the
        specified path using joblib.
        """
        model_data = joblib.load(path)
        self._features = model_data.get("features", [])
        self._idf = model_data.get("idf", {})
pkgs/swarmauri_standard/swarmauri_standard/factories/AgentFactory.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/factories/Factory.py b/pkgs/swarmauri_standard/swarmauri_standard/factories/Factory.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/factories/Factory.py rename to pkgs/swarmauri_standard/swarmauri_standard/factories/Factory.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/factories/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/factories/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/factories/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/factories/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/BlackForestImgGenModel.py b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/BlackForestImgGenModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/BlackForestImgGenModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/image_gens/BlackForestImgGenModel.py index bc6dea3f4..f1406da20 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/BlackForestImgGenModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/BlackForestImgGenModel.py @@ -40,7 +40,7 @@ def __init__(self, **kwargs): "X-Key": self.api_key.get_secret_value(), } self._client = httpx.Client(headers=self._headers, timeout=self.timeout) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] async def _get_async_client(self) -> httpx.AsyncClient: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/DeepInfraImgGenModel.py b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/DeepInfraImgGenModel.py similarity index 98% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/DeepInfraImgGenModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/image_gens/DeepInfraImgGenModel.py index e020b1871..f62a7bffc 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/DeepInfraImgGenModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/DeepInfraImgGenModel.py @@ -51,7 +51,7 @@ def __init__(self, **kwargs): "Authorization": f"Bearer {self.api_key.get_secret_value()}", } self._client = httpx.Client(headers=self._headers, timeout=self.timeout) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] async def _get_async_client(self) -> httpx.AsyncClient: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/FalAIImgGenModel.py b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/FalAIImgGenModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/FalAIImgGenModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/image_gens/FalAIImgGenModel.py index 26b0be92f..72a3c5d81 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/FalAIImgGenModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/FalAIImgGenModel.py @@ -53,7 +53,7 @@ def __init__(self, **kwargs): "Authorization": f"Key {self.api_key.get_secret_value()}", } self._client = httpx.Client(headers=self._headers, timeout=self.timeout) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] async def _get_async_client(self) -> httpx.AsyncClient: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/HyperbolicImgGenModel.py b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/HyperbolicImgGenModel.py similarity index 99% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/HyperbolicImgGenModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/image_gens/HyperbolicImgGenModel.py index 485103e31..7958acd04 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/HyperbolicImgGenModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/HyperbolicImgGenModel.py @@ -67,7 +67,7 @@ def __init__(self, **kwargs): "Authorization": f"Bearer {self.api_key.get_secret_value()}", } self._client = httpx.Client(headers=self._headers, timeout=30) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] async def _get_async_client(self) -> httpx.AsyncClient: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/OpenAIImgGenModel.py b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/OpenAIImgGenModel.py similarity index 98% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/OpenAIImgGenModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/image_gens/OpenAIImgGenModel.py index 651a5208c..7d44d168b 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/OpenAIImgGenModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/OpenAIImgGenModel.py @@ -43,7 +43,7 @@ def __init__(self, **kwargs) -> None: "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/image_gens/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/image_gens/__init__.py 
rename to pkgs/swarmauri_standard/swarmauri_standard/image_gens/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AI21StudioModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/AI21StudioModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AI21StudioModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/AI21StudioModel.py index 4a0eba28a..6ebb30885 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AI21StudioModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/AI21StudioModel.py @@ -61,7 +61,7 @@ def __init__(self, **data) -> None: base_url=self._BASE_URL, timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages(self, messages: List[Type["MessageBase"]]) -> List[dict]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AnthropicModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/AnthropicModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AnthropicModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/AnthropicModel.py index 6c0dc236b..f7211ed0c 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AnthropicModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/AnthropicModel.py @@ -54,7 +54,7 @@ def __init__(self, **data): headers=headers, base_url=self._BASE_URL, timeout=self.timeout ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AnthropicToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/AnthropicToolModel.py similarity index 99% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AnthropicToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/AnthropicToolModel.py index cbbc60d0c..427378dc0 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/AnthropicToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/AnthropicToolModel.py @@ -69,7 +69,7 @@ def __init__(self, **data): self._async_client = httpx.AsyncClient( headers=headers, base_url=self._BASE_URL, timeout=self.timeout ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/CohereModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/CohereModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/CohereModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/CohereModel.py index b3fceda4b..00fc8a9bf 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/CohereModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/CohereModel.py @@ -55,7 +55,7 @@ def __init__(self, **data): self._client = httpx.Client( headers=headers, base_url=self._BASE_URL, timeout=self.timeout ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def get_headers(self) -> Dict[str, str]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/CohereToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/CohereToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/CohereToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/CohereToolModel.py index ad6f5441c..e550b7936 100644 --- 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/CohereToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/CohereToolModel.py @@ -75,7 +75,7 @@ def __init__(self, **data): self._async_client = httpx.AsyncClient( headers=headers, base_url=self._BASE_URL, timeout=self.timeout ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/DeepInfraModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/DeepInfraModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/DeepInfraModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/DeepInfraModel.py index bacf33cf1..33ad30a19 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/DeepInfraModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/DeepInfraModel.py @@ -69,7 +69,7 @@ def __init__(self, **data): headers=headers, base_url=self._BASE_URL, timeout=self.timeout ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/DeepSeekModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/DeepSeekModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/DeepSeekModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/DeepSeekModel.py index 2a5efe462..23d4ce184 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/DeepSeekModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/DeepSeekModel.py @@ -57,7 +57,7 @@ def __init__(self, **data): timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + 
self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/FalAIVisionModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/FalAIVisionModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/FalAIVisionModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/FalAIVisionModel.py index 6fe89f462..13e12b520 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/FalAIVisionModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/FalAIVisionModel.py @@ -67,7 +67,7 @@ def __init__(self, **data): } self._client = httpx.Client(headers=self._headers, timeout=self.timeout) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GeminiProModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/GeminiProModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GeminiProModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/GeminiProModel.py index ea44bb0d1..08e98ab8c 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GeminiProModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/GeminiProModel.py @@ -81,7 +81,7 @@ def __init__(self, api_key: SecretStr, **kwargs): ) ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GeminiToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/GeminiToolModel.py similarity index 99% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GeminiToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/GeminiToolModel.py index ba5e5c33f..2066a60bb 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GeminiToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/GeminiToolModel.py @@ -84,7 +84,7 @@ def __init__(self, api_key: SecretStr, name: str): name (str): The name of the Gemini model in use. """ self.api_key = api_key - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqAIAudio.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqAIAudio.py similarity index 98% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqAIAudio.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/GroqAIAudio.py index acecabb32..6e8fbf0ef 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqAIAudio.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqAIAudio.py @@ -63,7 +63,7 @@ def __init__(self, **data): timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/GroqModel.py index 94c9bfecb..efa707ba9 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqModel.py +++ 
b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqModel.py @@ -62,7 +62,7 @@ def __init__(self, **data): timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/GroqToolModel.py index 70720bb2b..6b7f9338d 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqToolModel.py @@ -75,7 +75,7 @@ def __init__(self, **data): timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqVisionModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqVisionModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqVisionModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/GroqVisionModel.py index 4143b34c3..4dbb2bf10 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/GroqVisionModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/GroqVisionModel.py @@ -70,7 +70,7 @@ def __init__(self, **data): timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicAudioTTS.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/HyperbolicAudioTTS.py similarity index 98% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicAudioTTS.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/HyperbolicAudioTTS.py index aa8895d4c..198d553d4 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicAudioTTS.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/HyperbolicAudioTTS.py @@ -70,7 +70,7 @@ def __init__(self, **data): "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _prepare_payload(self, text: str) -> Dict: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/HyperbolicModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/HyperbolicModel.py index 943a77236..f2df8f123 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/HyperbolicModel.py @@ -55,7 +55,7 @@ def __init__(self, **data) -> None: base_url=self._BASE_URL, timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicVisionModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/HyperbolicVisionModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/HyperbolicVisionModel.py 
@ComponentBase.register_type(LLMBase, "LlamaCppModel")
class LlamaCppModel(LLMBase):
    """
    A client for a llama.cpp server's OpenAI-compatible HTTP API for text
    generation.

    This implementation uses httpx for both synchronous and asynchronous HTTP
    requests, providing support for predictions, streaming responses, and
    batch processing.

    Attributes:
        api_key (Optional[SecretStr]): Optional bearer token. A local
            llama.cpp server typically requires no authentication; the
            Authorization header is only sent when a key is provided.
        allowed_models (List[str]): Model identifiers reported by the server's
            ``/models`` endpoint. Queried on init only when left empty.
        name (str): The currently selected model name. Defaults to the first
            model reported by the server when not explicitly configured.
        type (Literal["LlamaCppModel"]): Type identifier for the model class.
        timeout (float): Request timeout in seconds. Defaults to 600.
    """

    # llama.cpp's bundled server exposes an OpenAI-compatible API under /v1
    # on port 8080 by default.
    _BASE_URL: str = PrivateAttr("http://localhost:8080/v1")
    _client: httpx.Client = PrivateAttr(default=None)
    _async_client: httpx.AsyncClient = PrivateAttr(default=None)

    api_key: Optional[SecretStr] = None
    allowed_models: List[str] = []

    name: str = ""

    type: Literal["LlamaCppModel"] = "LlamaCppModel"

    timeout: float = 600.0

    def __init__(self, **data):
        """
        Initializes the LlamaCppModel instance and sets up httpx clients for
        both sync and async operations.

        Args:
            **data: Keyword arguments for model initialization.
        """
        super().__init__(**data)
        headers = {"Content-Type": "application/json"}
        if self.api_key:
            # Auth is optional for a local llama.cpp server; only attach the
            # bearer token when one was actually configured.
            headers["Authorization"] = f"Bearer {self.api_key.get_secret_value()}"
        self._client = httpx.Client(
            headers=headers, base_url=self._BASE_URL, timeout=self.timeout
        )
        self._async_client = httpx.AsyncClient(
            headers=headers, base_url=self._BASE_URL, timeout=self.timeout
        )

        # Respect explicitly-configured values; only query the server when the
        # caller did not supply them, and never clobber a caller-chosen name.
        self.allowed_models = self.allowed_models or self.get_allowed_models()
        if not self.name and self.allowed_models:
            self.name = self.allowed_models[0]

    def _format_messages(
        self, messages: List[SubclassUnion[MessageBase]]
    ) -> List[Dict[str, str]]:
        """
        Formats conversation history into a list of dictionaries suitable for
        API requests.

        Args:
            messages (List[SubclassUnion[MessageBase]]): The conversation history.

        Returns:
            List[Dict[str, str]]: Formatted message list.
        """
        message_properties = ["content", "role", "name"]
        return [
            message.model_dump(include=message_properties, exclude_none=True)
            for message in messages
        ]

    def _create_request_payload(
        self,
        formatted_messages: List[Dict[str, str]],
        temperature: float,
        max_tokens: int,
        enable_json: bool,
        stop: Optional[List[str]] = None,
        stream: bool = False,
    ) -> Dict:
        """
        Creates the payload for the API request.

        Args:
            formatted_messages (List[Dict[str, str]]): Formatted messages for the conversation.
            temperature (float): Sampling temperature for the response.
            max_tokens (int): Maximum number of tokens to generate.
            enable_json (bool): Whether to enable JSON response format.
            stop (List[str], optional): Stop sequences.
            stream (bool): Whether to stream the response.

        Returns:
            Dict: Payload for the API request.
        """
        payload = {
            "model": self.name,
            "messages": formatted_messages,
            "temperature": temperature,
            "max_tokens": max_tokens,
            "top_p": 1,
            "frequency_penalty": 0,
            "presence_penalty": 0,
            "stream": stream,
        }

        if stop:
            payload["stop"] = stop

        if enable_json:
            payload["response_format"] = {"type": "json_object"}

        return payload

    @retry_on_status_codes((429, 529), max_retries=1)
    def predict(
        self,
        conversation,
        temperature=0.7,
        max_tokens=256,
        enable_json=False,
        stop: Optional[List[str]] = None,
    ):
        """
        Sends a synchronous request to generate a response from the model.

        Args:
            conversation: The conversation object containing message history.
            temperature (float): Sampling temperature for response generation.
            max_tokens (int): Maximum number of tokens to generate.
            enable_json (bool): Flag for enabling JSON response format.
            stop (List[str], optional): Stop sequences for the response.

        Returns:
            Updated conversation with the model's response.
        """
        formatted_messages = self._format_messages(conversation.history)
        payload = self._create_request_payload(
            formatted_messages, temperature, max_tokens, enable_json, stop
        )

        response = self._client.post("/chat/completions", json=payload)
        response.raise_for_status()

        result = response.json()
        message_content = result["choices"][0]["message"]["content"]
        conversation.add_message(AgentMessage(content=message_content))

        return conversation

    @retry_on_status_codes((429, 529), max_retries=1)
    async def apredict(
        self,
        conversation,
        temperature=0.7,
        max_tokens=256,
        enable_json=False,
        stop: Optional[List[str]] = None,
    ):
        """
        Sends an asynchronous request to generate a response from the model.

        Args:
            conversation: The conversation object containing message history.
            temperature (float): Sampling temperature for response generation.
            max_tokens (int): Maximum number of tokens to generate.
            enable_json (bool): Flag for enabling JSON response format.
            stop (List[str], optional): Stop sequences for the response.

        Returns:
            Updated conversation with the model's response.
        """
        formatted_messages = self._format_messages(conversation.history)
        payload = self._create_request_payload(
            formatted_messages, temperature, max_tokens, enable_json, stop
        )

        response = await self._async_client.post("/chat/completions", json=payload)
        response.raise_for_status()

        result = response.json()
        message_content = result["choices"][0]["message"]["content"]
        conversation.add_message(AgentMessage(content=message_content))

        return conversation

    @retry_on_status_codes((429, 529), max_retries=1)
    def stream(
        self,
        conversation,
        temperature=0.7,
        max_tokens=256,
        stop: Optional[List[str]] = None,
    ) -> Iterator[str]:
        """
        Streams response content from the model synchronously.

        Args:
            conversation: The conversation object containing message history.
            temperature (float): Sampling temperature for response generation.
            max_tokens (int): Maximum number of tokens to generate.
            stop (List[str], optional): Stop sequences for the response.

        Yields:
            str: Chunks of content from the model's response.
        """
        formatted_messages = self._format_messages(conversation.history)
        payload = self._create_request_payload(
            formatted_messages, temperature, max_tokens, False, stop, stream=True
        )

        with self._client.stream("POST", "/chat/completions", json=payload) as response:
            response.raise_for_status()
            collected_content = []

            for line in response.iter_lines():
                # Convert bytes to string if necessary
                if isinstance(line, bytes):
                    line = line.decode("utf-8")

                if line.startswith("data: "):
                    line = line[6:]  # Remove 'data: ' prefix
                    if line != "[DONE]":
                        chunk = json.loads(line)
                        if chunk["choices"][0]["delta"].get("content"):
                            content = chunk["choices"][0]["delta"]["content"]
                            collected_content.append(content)
                            yield content

        full_content = "".join(collected_content)
        conversation.add_message(AgentMessage(content=full_content))

    @retry_on_status_codes((429, 529), max_retries=1)
    async def astream(
        self,
        conversation,
        temperature=0.7,
        max_tokens=256,
        stop: Optional[List[str]] = None,
    ) -> AsyncIterator[str]:
        """
        Streams response content from the model asynchronously.

        Args:
            conversation: The conversation object containing message history.
            temperature (float): Sampling temperature for response generation.
            max_tokens (int): Maximum number of tokens to generate.
            stop (List[str], optional): Stop sequences for the response.

        Yields:
            str: Chunks of content from the model's response.
        """
        formatted_messages = self._format_messages(conversation.history)
        payload = self._create_request_payload(
            formatted_messages, temperature, max_tokens, False, stop, stream=True
        )

        async with self._async_client.stream(
            "POST", "/chat/completions", json=payload
        ) as response:
            response.raise_for_status()
            collected_content = []

            async for line in response.aiter_lines():
                if line.startswith("data: "):
                    line = line[6:]  # Remove 'data: ' prefix
                    if line != "[DONE]":
                        chunk = json.loads(line)
                        if chunk["choices"][0]["delta"].get("content"):
                            content = chunk["choices"][0]["delta"]["content"]
                            collected_content.append(content)
                            yield content

        full_content = "".join(collected_content)
        conversation.add_message(AgentMessage(content=full_content))

    def batch(
        self,
        conversations: List,
        temperature=0.7,
        max_tokens=256,
        enable_json=False,
        stop: Optional[List[str]] = None,
    ) -> List:
        """
        Processes multiple conversations in batch synchronously.

        Args:
            conversations (List): List of conversation objects.
            temperature (float): Sampling temperature for response generation.
            max_tokens (int): Maximum number of tokens to generate.
            enable_json (bool): Flag for enabling JSON response format.
            stop (List[str], optional): Stop sequences for responses.

        Returns:
            List: List of updated conversations with model responses.
        """
        return [
            self.predict(
                conv,
                temperature=temperature,
                max_tokens=max_tokens,
                enable_json=enable_json,
                stop=stop,
            )
            for conv in conversations
        ]

    async def abatch(
        self,
        conversations: List,
        temperature=0.7,
        max_tokens=256,
        enable_json=False,
        stop: Optional[List[str]] = None,
        max_concurrent=5,
    ) -> List:
        """
        Processes multiple conversations asynchronously, with concurrency control.

        Args:
            conversations (List): List of conversation objects.
            temperature (float): Sampling temperature for response generation.
            max_tokens (int): Maximum number of tokens to generate.
            enable_json (bool): Flag for enabling JSON response format.
            stop (List[str], optional): Stop sequences for responses.
            max_concurrent (int): Maximum number of concurrent tasks.

        Returns:
            List: List of updated conversations with model responses.
        """
        semaphore = asyncio.Semaphore(max_concurrent)

        async def process_conversation(conv):
            async with semaphore:
                return await self.apredict(
                    conv,
                    temperature=temperature,
                    max_tokens=max_tokens,
                    enable_json=enable_json,
                    stop=stop,
                )

        tasks = [process_conversation(conv) for conv in conversations]
        return await asyncio.gather(*tasks)

    def get_allowed_models(self) -> List[str]:
        """
        Queries the server's ``/models`` endpoint to get the list of allowed
        models.

        Returns:
            List[str]: List of allowed model identifiers.
        """
        response = self._client.get("/models")
        response.raise_for_status()
        payload = response.json()
        # OpenAI-compatible servers (including llama.cpp) wrap the model list
        # as {"object": "list", "data": [...]}; tolerate a bare list as well.
        models = payload.get("data", payload) if isinstance(payload, dict) else payload
        return [model["id"] for model in models]
pkgs/standards/swarmauri_standard/swarmauri_standard/llms/MistralToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/MistralToolModel.py index 0eb7352a9..b04a4d402 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/MistralToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/MistralToolModel.py @@ -70,7 +70,7 @@ def __init__(self, **data) -> None: base_url=self._BASE_URL, timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIAudio.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIAudio.py similarity index 98% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIAudio.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIAudio.py index 555a6c1b1..79dcd00ca 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIAudio.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIAudio.py @@ -54,7 +54,7 @@ def __init__(self, **data): base_url=self._BASE_URL, timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIAudioTTS.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIAudioTTS.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIAudioTTS.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIAudioTTS.py index fa6a1e8b3..88e79ceae 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIAudioTTS.py +++ 
b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIAudioTTS.py @@ -61,7 +61,7 @@ def __init__(self, **data): "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @model_validator(mode="after") diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIModel.py index 2cc6d2079..b593c8205 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIModel.py @@ -60,7 +60,7 @@ def __init__(self, **data) -> None: "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIReasonModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIReasonModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIReasonModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIReasonModel.py index 636051f94..953ee6764 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIReasonModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIReasonModel.py @@ -49,7 +49,7 @@ def __init__(self, **data) -> None: "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = 
self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIToolModel.py index 0cb2decc4..459a769df 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/OpenAIToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/OpenAIToolModel.py @@ -63,7 +63,7 @@ def __init__(self, **data): "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/PerplexityModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/PerplexityModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/PerplexityModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/PerplexityModel.py index 3030a0bba..5b098d122 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/PerplexityModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/PerplexityModel.py @@ -60,7 +60,7 @@ def __init__(self, **data): base_url=self._BASE_URL, timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/PlayHTModel.py 
b/pkgs/swarmauri_standard/swarmauri_standard/llms/PlayHTModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/PlayHTModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/PlayHTModel.py index 0319ff770..071add8c5 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/PlayHTModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/PlayHTModel.py @@ -67,7 +67,7 @@ def __init__(self, **data) -> None: "AUTHORIZATION": self.api_key.get_secret_value(), "X-USER-ID": self.user_id, } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] self.__prebuilt_voices = self._fetch_prebuilt_voices() self.allowed_voices = self._get_allowed_voices(self.name) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/WhisperLargeModel.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/WhisperLargeModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/llms/WhisperLargeModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/WhisperLargeModel.py index 717243698..072a120f2 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/WhisperLargeModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/llms/WhisperLargeModel.py @@ -66,7 +66,7 @@ def __init__(self, **data): super().__init__(**data) self._header = {"Authorization": f"Bearer {self.api_key.get_secret_value()}"} self._client = httpx.Client(header=self._header, timeout=self.timeout) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/llms/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/llms/__init__.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/llms/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/llms/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/CompletenessMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/CompletenessMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/CompletenessMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/CompletenessMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/DistinctivenessMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/DistinctivenessMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/DistinctivenessMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/DistinctivenessMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/FirstImpressionMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/FirstImpressionMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/FirstImpressionMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/FirstImpressionMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/MeanMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/MeanMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/MeanMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/MeanMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/MiscMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/MiscMeasurement.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/MiscMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/MiscMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/MissingnessMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/MissingnessMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/MissingnessMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/MissingnessMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/PatternMatchingMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/PatternMatchingMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/PatternMatchingMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/PatternMatchingMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/RatioOfSumsMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/RatioOfSumsMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/RatioOfSumsMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/RatioOfSumsMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/StaticMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/StaticMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/StaticMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/StaticMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/UniquenessMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/UniquenessMeasurement.py similarity index 
100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/UniquenessMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/UniquenessMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/ZeroMeasurement.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/ZeroMeasurement.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/ZeroMeasurement.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/ZeroMeasurement.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/measurements/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/measurements/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/measurements/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/messages/AgentMessage.py b/pkgs/swarmauri_standard/swarmauri_standard/messages/AgentMessage.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/messages/AgentMessage.py rename to pkgs/swarmauri_standard/swarmauri_standard/messages/AgentMessage.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/messages/FunctionMessage.py b/pkgs/swarmauri_standard/swarmauri_standard/messages/FunctionMessage.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/messages/FunctionMessage.py rename to pkgs/swarmauri_standard/swarmauri_standard/messages/FunctionMessage.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/messages/HumanMessage.py b/pkgs/swarmauri_standard/swarmauri_standard/messages/HumanMessage.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/messages/HumanMessage.py rename to pkgs/swarmauri_standard/swarmauri_standard/messages/HumanMessage.py diff 
--git a/pkgs/standards/swarmauri_standard/swarmauri_standard/messages/SystemMessage.py b/pkgs/swarmauri_standard/swarmauri_standard/messages/SystemMessage.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/messages/SystemMessage.py rename to pkgs/swarmauri_standard/swarmauri_standard/messages/SystemMessage.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/messages/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/messages/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/messages/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/messages/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/CSVParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/CSVParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/CSVParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/CSVParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/HTMLTagStripParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/HTMLTagStripParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/HTMLTagStripParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/HTMLTagStripParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/Md2HtmlParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/Md2HtmlParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/Md2HtmlParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/Md2HtmlParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/OpenAPISpecParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/OpenAPISpecParser.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/OpenAPISpecParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/OpenAPISpecParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/PhoneNumberExtractorParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/PhoneNumberExtractorParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/PhoneNumberExtractorParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/PhoneNumberExtractorParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/PythonParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/PythonParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/PythonParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/PythonParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/RegExParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/URLExtractorParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/URLExtractorParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/URLExtractorParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/URLExtractorParser.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/XMLParser.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/XMLParser.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/XMLParser.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/XMLParser.py diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/parsers/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/parsers/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/parsers/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/pipelines/Pipeline.py b/pkgs/swarmauri_standard/swarmauri_standard/pipelines/Pipeline.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/pipelines/Pipeline.py rename to pkgs/swarmauri_standard/swarmauri_standard/pipelines/Pipeline.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/pipelines/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/pipelines/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/pipelines/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/pipelines/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/prompt_templates/PromptTemplate.py b/pkgs/swarmauri_standard/swarmauri_standard/prompt_templates/PromptTemplate.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/prompt_templates/PromptTemplate.py rename to pkgs/swarmauri_standard/swarmauri_standard/prompt_templates/PromptTemplate.py diff --git a/pkgs/swarmauri_standard/swarmauri_standard/prompt_templates/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/prompt_templates/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/Prompt.py b/pkgs/swarmauri_standard/swarmauri_standard/prompts/Prompt.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/Prompt.py rename to pkgs/swarmauri_standard/swarmauri_standard/prompts/Prompt.py diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/PromptGenerator.py b/pkgs/swarmauri_standard/swarmauri_standard/prompts/PromptGenerator.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/PromptGenerator.py rename to pkgs/swarmauri_standard/swarmauri_standard/prompts/PromptGenerator.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/PromptMatrix.py b/pkgs/swarmauri_standard/swarmauri_standard/prompts/PromptMatrix.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/PromptMatrix.py rename to pkgs/swarmauri_standard/swarmauri_standard/prompts/PromptMatrix.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/prompts/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/prompts/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/prompts/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/AnthropicSchemaConverter.py b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/AnthropicSchemaConverter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/AnthropicSchemaConverter.py rename to pkgs/swarmauri_standard/swarmauri_standard/schema_converters/AnthropicSchemaConverter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/CohereSchemaConverter.py b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/CohereSchemaConverter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/CohereSchemaConverter.py rename to pkgs/swarmauri_standard/swarmauri_standard/schema_converters/CohereSchemaConverter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/GeminiSchemaConverter.py 
b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/GeminiSchemaConverter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/GeminiSchemaConverter.py rename to pkgs/swarmauri_standard/swarmauri_standard/schema_converters/GeminiSchemaConverter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/GroqSchemaConverter.py b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/GroqSchemaConverter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/GroqSchemaConverter.py rename to pkgs/swarmauri_standard/swarmauri_standard/schema_converters/GroqSchemaConverter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/MistralSchemaConverter.py b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/MistralSchemaConverter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/MistralSchemaConverter.py rename to pkgs/swarmauri_standard/swarmauri_standard/schema_converters/MistralSchemaConverter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/OpenAISchemaConverter.py b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/OpenAISchemaConverter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/OpenAISchemaConverter.py rename to pkgs/swarmauri_standard/swarmauri_standard/schema_converters/OpenAISchemaConverter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/ShuttleAISchemaConverter.py b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/ShuttleAISchemaConverter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/ShuttleAISchemaConverter.py rename to 
pkgs/swarmauri_standard/swarmauri_standard/schema_converters/ShuttleAISchemaConverter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/schema_converters/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/schema_converters/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/schema_converters/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/service_registries/ServiceRegistry.py b/pkgs/swarmauri_standard/swarmauri_standard/service_registries/ServiceRegistry.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/service_registries/ServiceRegistry.py rename to pkgs/swarmauri_standard/swarmauri_standard/service_registries/ServiceRegistry.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/service_registries/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/service_registries/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/service_registries/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/service_registries/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/state/DictState.py b/pkgs/swarmauri_standard/swarmauri_standard/state/DictState.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/state/DictState.py rename to pkgs/swarmauri_standard/swarmauri_standard/state/DictState.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/state/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/state/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/state/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/state/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/stt/GroqSTT.py 
b/pkgs/swarmauri_standard/swarmauri_standard/stt/GroqSTT.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/stt/GroqSTT.py rename to pkgs/swarmauri_standard/swarmauri_standard/stt/GroqSTT.py index 429c3fb56..c0886e504 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/stt/GroqSTT.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/stt/GroqSTT.py @@ -51,7 +51,7 @@ def __init__(self, **data): base_url=self._BASE_URL, timeout=30, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/stt/OpenaiSTT.py b/pkgs/swarmauri_standard/swarmauri_standard/stt/OpenaiSTT.py similarity index 98% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/stt/OpenaiSTT.py rename to pkgs/swarmauri_standard/swarmauri_standard/stt/OpenaiSTT.py index 33f5a2987..0eb35ade1 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/stt/OpenaiSTT.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/stt/OpenaiSTT.py @@ -51,7 +51,7 @@ def __init__(self, **data): headers={"Authorization": f"Bearer {self.api_key.get_secret_value()}"}, base_url=self._BASE_URL, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/stt/WhisperLargeSTT.py b/pkgs/swarmauri_standard/swarmauri_standard/stt/WhisperLargeSTT.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/stt/WhisperLargeSTT.py rename to pkgs/swarmauri_standard/swarmauri_standard/stt/WhisperLargeSTT.py index fa37060c6..8d16cf53d 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/stt/WhisperLargeSTT.py 
+++ b/pkgs/swarmauri_standard/swarmauri_standard/stt/WhisperLargeSTT.py @@ -56,7 +56,7 @@ def __init__(self, **data): super().__init__(**data) self._header = {"Authorization": f"Bearer {self.api_key.get_secret_value()}"} self._client = httpx.Client(header=self._header, timeout=30) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/swarmauri_standard/swarmauri_standard/stt/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/stt/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/swarms/Swarm.py b/pkgs/swarmauri_standard/swarmauri_standard/swarms/Swarm.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/swarms/Swarm.py rename to pkgs/swarmauri_standard/swarmauri_standard/swarms/Swarm.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/swarms/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/swarms/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/swarms/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/swarms/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/RoundRobinStrategy.py b/pkgs/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/RoundRobinStrategy.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/RoundRobinStrategy.py rename to pkgs/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/RoundRobinStrategy.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/__init__.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/task_mgmt_strategies/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/AnthropicToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/AnthropicToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/AnthropicToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/tool_llms/AnthropicToolModel.py index 64bc2e0ad..62f58f1d0 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/AnthropicToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/AnthropicToolModel.py @@ -60,7 +60,7 @@ def __init__(self, **data): self._async_client = httpx.AsyncClient( headers=headers, base_url=self._BASE_URL, timeout=self.timeout ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/CohereToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/CohereToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/CohereToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/tool_llms/CohereToolModel.py index 703048bf6..77a31c5d7 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/CohereToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/CohereToolModel.py @@ -66,7 +66,7 @@ def __init__(self, **data): self._async_client = httpx.AsyncClient( headers=headers, base_url=self._BASE_URL, timeout=self.timeout ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = 
self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/GeminiToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/GeminiToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/GeminiToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/tool_llms/GeminiToolModel.py index 73b31b2e4..e6a96cae2 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/GeminiToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/GeminiToolModel.py @@ -73,7 +73,7 @@ def __init__(self, api_key: SecretStr, name: str): name (str): The name of the Gemini model in use. """ self.api_key = api_key - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/GroqToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/GroqToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/GroqToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/tool_llms/GroqToolModel.py index 0c69b81f9..1019116e4 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/GroqToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/GroqToolModel.py @@ -66,7 +66,7 @@ def __init__(self, **data): timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/MistralToolModel.py 
b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/MistralToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/MistralToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/tool_llms/MistralToolModel.py index 4fc0587ae..b6c80de81 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/MistralToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/MistralToolModel.py @@ -61,7 +61,7 @@ def __init__(self, **data) -> None: base_url=self._BASE_URL, timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/OpenAIToolModel.py b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/OpenAIToolModel.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/OpenAIToolModel.py rename to pkgs/swarmauri_standard/swarmauri_standard/tool_llms/OpenAIToolModel.py index d3e17a367..dfae7a233 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tool_llms/OpenAIToolModel.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/OpenAIToolModel.py @@ -54,7 +54,7 @@ def __init__(self, **data): "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _schema_convert_tools(self, tools) -> List[Dict[str, Any]]: diff --git a/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/tool_llms/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/toolkits/AccessibilityToolkit.py b/pkgs/swarmauri_standard/swarmauri_standard/toolkits/AccessibilityToolkit.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/toolkits/AccessibilityToolkit.py rename to pkgs/swarmauri_standard/swarmauri_standard/toolkits/AccessibilityToolkit.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/toolkits/Toolkit.py b/pkgs/swarmauri_standard/swarmauri_standard/toolkits/Toolkit.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/toolkits/Toolkit.py rename to pkgs/swarmauri_standard/swarmauri_standard/toolkits/Toolkit.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/toolkits/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/toolkits/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/toolkits/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/toolkits/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/AdditionTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/AdditionTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/AdditionTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/AdditionTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/AutomatedReadabilityIndexTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/AutomatedReadabilityIndexTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/AutomatedReadabilityIndexTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/AutomatedReadabilityIndexTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/CalculatorTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/CodeExtractorTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/CodeExtractorTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/CodeExtractorTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/CodeExtractorTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/CodeInterpreterTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/CodeInterpreterTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/CodeInterpreterTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/CodeInterpreterTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/ColemanLiauIndexTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/ColemanLiauIndexTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/ColemanLiauIndexTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/ColemanLiauIndexTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/FleschKincaidTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/FleschKincaidTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/FleschKincaidTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/FleschKincaidTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/FleschReadingEaseTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/FleschReadingEaseTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/FleschReadingEaseTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/FleschReadingEaseTool.py diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/GunningFogTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/GunningFogTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/GunningFogTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/GunningFogTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/ImportMemoryModuleTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/ImportMemoryModuleTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/ImportMemoryModuleTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/ImportMemoryModuleTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/JSONRequestsTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/JSONRequestsTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/JSONRequestsTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/JSONRequestsTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/Parameter.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/Parameter.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/Parameter.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/Parameter.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/RequestsTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/RequestsTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/RequestsTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/RequestsTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/TemperatureConverterTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/TemperatureConverterTool.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/tools/TemperatureConverterTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/TemperatureConverterTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/TestTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/TestTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/TestTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/TestTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/WeatherTool.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/WeatherTool.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/WeatherTool.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/WeatherTool.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tools/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/tools/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tools/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/tools/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/CallableTracer.py b/pkgs/swarmauri_standard/swarmauri_standard/tracing/CallableTracer.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/CallableTracer.py rename to pkgs/swarmauri_standard/swarmauri_standard/tracing/CallableTracer.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/ChainTracer.py b/pkgs/swarmauri_standard/swarmauri_standard/tracing/ChainTracer.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/ChainTracer.py rename to pkgs/swarmauri_standard/swarmauri_standard/tracing/ChainTracer.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/SimpleTraceContext.py 
b/pkgs/swarmauri_standard/swarmauri_standard/tracing/SimpleTraceContext.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/SimpleTraceContext.py rename to pkgs/swarmauri_standard/swarmauri_standard/tracing/SimpleTraceContext.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/SimpleTracer.py b/pkgs/swarmauri_standard/swarmauri_standard/tracing/SimpleTracer.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/SimpleTracer.py rename to pkgs/swarmauri_standard/swarmauri_standard/tracing/SimpleTracer.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/TracedVariable.py b/pkgs/swarmauri_standard/swarmauri_standard/tracing/TracedVariable.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/TracedVariable.py rename to pkgs/swarmauri_standard/swarmauri_standard/tracing/TracedVariable.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/VariableTracer.py b/pkgs/swarmauri_standard/swarmauri_standard/tracing/VariableTracer.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/VariableTracer.py rename to pkgs/swarmauri_standard/swarmauri_standard/tracing/VariableTracer.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/tracing/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tracing/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/tracing/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/transports/PubSubTransport.py b/pkgs/swarmauri_standard/swarmauri_standard/transports/PubSubTransport.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/transports/PubSubTransport.py rename to 
pkgs/swarmauri_standard/swarmauri_standard/transports/PubSubTransport.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/transports/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/transports/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/transports/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/transports/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tts/HyperbolicTTS.py b/pkgs/swarmauri_standard/swarmauri_standard/tts/HyperbolicTTS.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tts/HyperbolicTTS.py rename to pkgs/swarmauri_standard/swarmauri_standard/tts/HyperbolicTTS.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tts/OpenaiTTS.py b/pkgs/swarmauri_standard/swarmauri_standard/tts/OpenaiTTS.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tts/OpenaiTTS.py rename to pkgs/swarmauri_standard/swarmauri_standard/tts/OpenaiTTS.py index 966091a8c..22bf5a9c8 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/tts/OpenaiTTS.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tts/OpenaiTTS.py @@ -51,7 +51,7 @@ def __init__(self, **data): "Authorization": f"Bearer {self.api_key.get_secret_value()}", "Content-Type": "application/json", } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @model_validator(mode="after") diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/tts/PlayhtTTS.py b/pkgs/swarmauri_standard/swarmauri_standard/tts/PlayhtTTS.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/tts/PlayhtTTS.py rename to pkgs/swarmauri_standard/swarmauri_standard/tts/PlayhtTTS.py index 874001708..7d7a8d952 100644 --- 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/tts/PlayhtTTS.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/tts/PlayhtTTS.py @@ -57,7 +57,7 @@ def __init__(self, **data) -> None: "AUTHORIZATION": self.api_key.get_secret_value(), "X-USER-ID": self.user_id, } - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] self.__prebuilt_voices = self._fetch_prebuilt_voices() self.allowed_voices = self._get_allowed_voices(self.name) diff --git a/pkgs/swarmauri_standard/swarmauri_standard/tts/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/tts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/swarmauri_standard/swarmauri_standard/utils/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/_get_subclasses.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/_get_subclasses.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/_get_subclasses.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/_get_subclasses.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/_lazy_import.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/_lazy_import.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/_lazy_import.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/_lazy_import.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/apply_metaclass.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/apply_metaclass.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/apply_metaclass.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/apply_metaclass.py diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_encoder.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/base64_encoder.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_encoder.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/base64_encoder.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_to_file_path.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/base64_to_file_path.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_to_file_path.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/base64_to_file_path.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_to_img_url.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/base64_to_img_url.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_to_img_url.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/base64_to_img_url.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_to_in_memory_img.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/base64_to_in_memory_img.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/base64_to_in_memory_img.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/base64_to_in_memory_img.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/decorate.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/decorate.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/decorate.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/decorate.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/duration_manager.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/duration_manager.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/swarmauri_standard/utils/duration_manager.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/duration_manager.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/file_path_to_base64.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/file_path_to_base64.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/file_path_to_base64.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/file_path_to_base64.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/file_path_to_img_url.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/file_path_to_img_url.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/file_path_to_img_url.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/file_path_to_img_url.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/file_path_to_in_memory_img.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/file_path_to_in_memory_img.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/file_path_to_in_memory_img.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/file_path_to_in_memory_img.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/get_class_hash.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/get_class_hash.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/get_class_hash.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/get_class_hash.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/img_url_to_base64.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/img_url_to_base64.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/img_url_to_base64.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/img_url_to_base64.py 
diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/img_url_to_file_path.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/img_url_to_file_path.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/img_url_to_file_path.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/img_url_to_file_path.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/img_url_to_in_memory_img.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/img_url_to_in_memory_img.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/img_url_to_in_memory_img.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/img_url_to_in_memory_img.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_base64.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_base64.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_base64.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_base64.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_file_path.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_file_path.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_file_path.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_file_path.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_img_url.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_img_url.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_img_url.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/in_memory_img_to_img_url.py diff --git 
a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/json_validator.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/json_validator.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/json_validator.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/json_validator.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/load_documents_from_folder.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/load_documents_from_folder.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/load_documents_from_folder.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/load_documents_from_folder.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/load_documents_from_json.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/load_documents_from_json.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/load_documents_from_json.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/load_documents_from_json.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/memoize.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/memoize.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/memoize.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/memoize.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/method_signature_extractor_decorator.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/method_signature_extractor_decorator.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/method_signature_extractor_decorator.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/method_signature_extractor_decorator.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/print_notebook_metadata.py 
b/pkgs/swarmauri_standard/swarmauri_standard/utils/print_notebook_metadata.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/print_notebook_metadata.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/print_notebook_metadata.py diff --git a/pkgs/swarmauri_standard/swarmauri_standard/utils/retry_decorator.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/retry_decorator.py new file mode 100644 index 000000000..a3c840122 --- /dev/null +++ b/pkgs/swarmauri_standard/swarmauri_standard/utils/retry_decorator.py @@ -0,0 +1,94 @@ +import time +import logging +import httpx +from functools import wraps +from typing import List, Callable, Any +import asyncio +import inspect + + +def retry_on_status_codes( + status_codes: List[int] = [429], max_retries: int = 3, retry_delay: int = 2 +): + """ + A decorator to retry both sync and async functions when specific status codes are encountered, + with exponential backoff. + """ + + def decorator(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + async def async_wrapper(*args: Any, **kwargs: Any) -> Any: + last_exception = None + attempt = 0 + while attempt < max_retries: + try: + return await func(*args, **kwargs) + except httpx.HTTPStatusError as e: + if e.response.status_code in status_codes: + attempt += 1 + last_exception = e + if attempt == max_retries: + break + backoff_time = retry_delay * (2 ** (attempt - 1)) + logging.warning( + f"Retry attempt {attempt}/{max_retries}: " + f"Received HTTP {e.response.status_code} for {func.__name__}. " + f"Retrying in {backoff_time:.2f} seconds. " + f"Original error: {str(e)}" + ) + await asyncio.sleep(backoff_time) + else: + raise + + if last_exception: + error_message = ( + f"Request to {func.__name__} failed after {max_retries} retries. " + f"Last encountered status code: {last_exception.response.status_code}. 
" + f"Last error details: {str(last_exception)}" + ) + logging.error(error_message) + raise Exception(error_message) + raise RuntimeError( + f"Unexpected error in retry mechanism for {func.__name__}" + ) + + @wraps(func) + def sync_wrapper(*args: Any, **kwargs: Any) -> Any: + last_exception = None + attempt = 0 + while attempt < max_retries: + try: + return func(*args, **kwargs) + except httpx.HTTPStatusError as e: + if e.response.status_code in status_codes: + attempt += 1 + last_exception = e + if attempt == max_retries: + break + backoff_time = retry_delay * (2 ** (attempt - 1)) + logging.warning( + f"Retry attempt {attempt}/{max_retries}: " + f"Received HTTP {e.response.status_code} for {func.__name__}. " + f"Retrying in {backoff_time:.2f} seconds. " + f"Original error: {str(e)}" + ) + time.sleep(backoff_time) + else: + raise + + if last_exception: + error_message = ( + f"Request to {func.__name__} failed after {max_retries} retries. " + f"Last encountered status code: {last_exception.response.status_code}. 
" + f"Last error details: {str(last_exception)}" + ) + logging.error(error_message) + raise Exception(error_message) + raise RuntimeError( + f"Unexpected error in retry mechanism for {func.__name__}" + ) + + # Check if the function is async or sync and return appropriate wrapper + return async_wrapper if inspect.iscoroutinefunction(func) else sync_wrapper + + return decorator diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/sql_log.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/sql_log.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/sql_log.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/sql_log.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/utils/timeout_wrapper.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/timeout_wrapper.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/utils/timeout_wrapper.py rename to pkgs/swarmauri_standard/swarmauri_standard/utils/timeout_wrapper.py diff --git a/pkgs/swarmauri_standard/swarmauri_standard/utils/tool_decorator.py b/pkgs/swarmauri_standard/swarmauri_standard/utils/tool_decorator.py new file mode 100644 index 000000000..1da75ba91 --- /dev/null +++ b/pkgs/swarmauri_standard/swarmauri_standard/utils/tool_decorator.py @@ -0,0 +1,63 @@ +import inspect +from typing import get_type_hints, List, Any +from pydantic import Field + +from swarmauri_standard.tools.Parameter import Parameter +from swarmauri_base.tools.ToolBase import ToolBase +from swarmauri_core.ComponentBase import ComponentBase + + +def tool(func): + """ + Decorator that creates a dynamic ToolBase subclass from the decorated function. + + The generated tool will: + - Use the function name as the `name` and `type` of the tool. + - Derive parameters from the function's signature and type hints. + - Use the function's docstring as the tool's description. 
+ """ + # Capture function name and docstring + func_name = func.__name__ + docstring = func.__doc__ or "" + + # Inspect the function signature for parameter names, defaults, etc. + signature = inspect.signature(func) + type_hints = get_type_hints(func) + + # Build the list of Parameter objects from the function signature + parameters_list: List[Parameter] = [] + for param_name, param in signature.parameters.items(): + # If the parameter has a type annotation, grab it; otherwise use "string" as default + annotated_type = type_hints.get(param_name, str) + + # Derive a required flag by checking if the parameter has a default + required = param.default == inspect.Parameter.empty + + # Use the parameter’s name, the string version of the annotated type, etc. + parameters_list.append( + Parameter( + name=param_name, + type=annotated_type.__name__, + description=f"Parameter for {param_name}", + required=required, + ) + ) + + # Dynamically create the subclass of ToolBase + @ComponentBase.register_type(ToolBase, func_name) + class FunctionTool(ToolBase): + version: str = "1.0.0" + parameters: List[Parameter] = Field(default_factory=lambda: parameters_list) + name: str = func_name + description: str = docstring + # The tool type is set to be the same as the function name + type: str = func_name + + def __call__(self, *args, **kwargs) -> Any: + """ + Invoke the underlying function with the provided arguments. 
+ """ + return func(*args, **kwargs) + + # Return an *instance* of this generated class (or the class itself) + return FunctionTool() diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vector_stores/TfidfVectorStore.py b/pkgs/swarmauri_standard/swarmauri_standard/vector_stores/TfidfVectorStore.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vector_stores/TfidfVectorStore.py rename to pkgs/swarmauri_standard/swarmauri_standard/vector_stores/TfidfVectorStore.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vector_stores/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/vector_stores/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vector_stores/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/vector_stores/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vectors/Vector.py b/pkgs/swarmauri_standard/swarmauri_standard/vectors/Vector.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vectors/Vector.py rename to pkgs/swarmauri_standard/swarmauri_standard/vectors/Vector.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vectors/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/vectors/__init__.py similarity index 100% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vectors/__init__.py rename to pkgs/swarmauri_standard/swarmauri_standard/vectors/__init__.py diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/FalVLM.py b/pkgs/swarmauri_standard/swarmauri_standard/vlms/FalVLM.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/FalVLM.py rename to pkgs/swarmauri_standard/swarmauri_standard/vlms/FalVLM.py index 35d80017e..b5c9b9a38 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/FalVLM.py +++ 
b/pkgs/swarmauri_standard/swarmauri_standard/vlms/FalVLM.py @@ -57,7 +57,7 @@ def __init__(self, **data): "Authorization": f"Key {self.api_key.get_secret_value()}", } self._client = httpx.Client(headers=self._headers, timeout=30) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] @retry_on_status_codes((429, 529), max_retries=1) diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/GroqVLM.py b/pkgs/swarmauri_standard/swarmauri_standard/vlms/GroqVLM.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/GroqVLM.py rename to pkgs/swarmauri_standard/swarmauri_standard/vlms/GroqVLM.py index ce81a91dc..e2fa5edd1 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/GroqVLM.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/vlms/GroqVLM.py @@ -58,7 +58,7 @@ def __init__(self, **data): headers={"Authorization": f"Bearer {self.api_key.get_secret_value()}"}, base_url=self._BASE_URL, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/HyperbolicVLM.py b/pkgs/swarmauri_standard/swarmauri_standard/vlms/HyperbolicVLM.py similarity index 99% rename from pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/HyperbolicVLM.py rename to pkgs/swarmauri_standard/swarmauri_standard/vlms/HyperbolicVLM.py index 72884e19e..c99fbb38f 100644 --- a/pkgs/standards/swarmauri_standard/swarmauri_standard/vlms/HyperbolicVLM.py +++ b/pkgs/swarmauri_standard/swarmauri_standard/vlms/HyperbolicVLM.py @@ -57,7 +57,7 @@ def __init__(self, **data): base_url=self._BASE_URL, timeout=self.timeout, ) - self.allowed_models = self.get_allowed_models() + self.allowed_models = self.allowed_models or self.get_allowed_models() 
self.name = self.allowed_models[0] def _format_messages( diff --git a/pkgs/swarmauri_standard/swarmauri_standard/vlms/__init__.py b/pkgs/swarmauri_standard/swarmauri_standard/vlms/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/standards/swarmauri_standard/tests/README.md b/pkgs/swarmauri_standard/tests/README.md similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/README.md rename to pkgs/swarmauri_standard/tests/README.md diff --git a/pkgs/standards/swarmauri_standard/tests/integration/agents/RagAgent_i9n_test.py b/pkgs/swarmauri_standard/tests/i9n/agents/RagAgent_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/agents/RagAgent_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/agents/RagAgent_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/integration/llms/AI21StudioModel_i9n_test.py b/pkgs/swarmauri_standard/tests/i9n/llms/AI21StudioModel_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/llms/AI21StudioModel_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/llms/AI21StudioModel_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/integration/llms/AnthropicModel_i9n_test.py b/pkgs/swarmauri_standard/tests/i9n/llms/AnthropicModel_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/llms/AnthropicModel_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/llms/AnthropicModel_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/integration/llms/CohereModel_i9n_test.py b/pkgs/swarmauri_standard/tests/i9n/llms/CohereModel_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/llms/CohereModel_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/llms/CohereModel_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/integration/llms/DeepInfraModel_i9n_test.py 
b/pkgs/swarmauri_standard/tests/i9n/llms/DeepInfraModel_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/llms/DeepInfraModel_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/llms/DeepInfraModel_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/integration/llms/DeepSeekModel_i9n_test.py b/pkgs/swarmauri_standard/tests/i9n/llms/DeepSeekModel_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/llms/DeepSeekModel_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/llms/DeepSeekModel_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/integration/llms/GroqModel_i9n_test.py b/pkgs/swarmauri_standard/tests/i9n/llms/GroqModel_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/llms/GroqModel_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/llms/GroqModel_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/integration/llms/OpenAIModel_i9n_test.py b/pkgs/swarmauri_standard/tests/i9n/llms/OpenAIModel_i9n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/integration/llms/OpenAIModel_i9n_test.py rename to pkgs/swarmauri_standard/tests/i9n/llms/OpenAIModel_i9n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/AnthropicToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/r8n/llms/AnthropicToolModel_r8n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/AnthropicToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/r8n/llms/AnthropicToolModel_r8n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/CohereToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/r8n/llms/CohereToolModel_r8n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/CohereToolModel_unit_test.py rename to 
pkgs/swarmauri_standard/tests/r8n/llms/CohereToolModel_r8n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/GeminiToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/r8n/llms/GeminiToolModel_r8n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/GeminiToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/r8n/llms/GeminiToolModel_r8n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/GroqToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/r8n/llms/GroqToolModel_r8n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/GroqToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/r8n/llms/GroqToolModel_r8n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/MistralToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/r8n/llms/MistralToolModel_r8n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/MistralToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/r8n/llms/MistralToolModel_r8n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/r8n/llms/OpenAIToolModel_r8n_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/r8n/llms/OpenAIToolModel_r8n_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/static/cityscape.png b/pkgs/swarmauri_standard/tests/static/cityscape.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/cityscape.png rename to pkgs/swarmauri_standard/tests/static/cityscape.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/credentials.json b/pkgs/swarmauri_standard/tests/static/credentials.json similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/credentials.json rename to 
pkgs/swarmauri_standard/tests/static/credentials.json diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image.png b/pkgs/swarmauri_standard/tests/static/generated_image.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/generated_image.png rename to pkgs/swarmauri_standard/tests/static/generated_image.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image.png 12-20-10-721 PM.png b/pkgs/swarmauri_standard/tests/static/generated_image.png 12-20-10-721 PM.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/generated_image.png 12-20-10-721 PM.png rename to pkgs/swarmauri_standard/tests/static/generated_image.png 12-20-10-721 PM.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image_0.png b/pkgs/swarmauri_standard/tests/static/generated_image_0.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/generated_image_0.png rename to pkgs/swarmauri_standard/tests/static/generated_image_0.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image_0.png 12-19-54-478 PM.png b/pkgs/swarmauri_standard/tests/static/generated_image_0.png 12-19-54-478 PM.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/generated_image_0.png 12-19-54-478 PM.png rename to pkgs/swarmauri_standard/tests/static/generated_image_0.png 12-19-54-478 PM.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image_1.png b/pkgs/swarmauri_standard/tests/static/generated_image_1.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/generated_image_1.png rename to pkgs/swarmauri_standard/tests/static/generated_image_1.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image_1.png 12-19-48-249 PM.png b/pkgs/swarmauri_standard/tests/static/generated_image_1.png 12-19-48-249 PM.png similarity index 100% rename from 
pkgs/standards/swarmauri_standard/tests/static/generated_image_1.png 12-19-48-249 PM.png rename to pkgs/swarmauri_standard/tests/static/generated_image_1.png 12-19-48-249 PM.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image_2.png b/pkgs/swarmauri_standard/tests/static/generated_image_2.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/generated_image_2.png rename to pkgs/swarmauri_standard/tests/static/generated_image_2.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/generated_image_2.png 12-20-03-172 PM.png b/pkgs/swarmauri_standard/tests/static/generated_image_2.png 12-20-03-172 PM.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/generated_image_2.png 12-20-03-172 PM.png rename to pkgs/swarmauri_standard/tests/static/generated_image_2.png 12-20-03-172 PM.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/hyperbolic_test2.mp3 b/pkgs/swarmauri_standard/tests/static/hyperbolic_test2.mp3 similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/hyperbolic_test2.mp3 rename to pkgs/swarmauri_standard/tests/static/hyperbolic_test2.mp3 diff --git a/pkgs/standards/swarmauri_standard/tests/static/hyperbolic_test3.mp3 b/pkgs/swarmauri_standard/tests/static/hyperbolic_test3.mp3 similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/hyperbolic_test3.mp3 rename to pkgs/swarmauri_standard/tests/static/hyperbolic_test3.mp3 diff --git a/pkgs/standards/swarmauri_standard/tests/static/hyperbolic_test_tts.mp3 b/pkgs/swarmauri_standard/tests/static/hyperbolic_test_tts.mp3 similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/hyperbolic_test_tts.mp3 rename to pkgs/swarmauri_standard/tests/static/hyperbolic_test_tts.mp3 diff --git a/pkgs/standards/swarmauri_standard/tests/static/sunset.png b/pkgs/swarmauri_standard/tests/static/sunset.png similarity index 100% rename from 
pkgs/standards/swarmauri_standard/tests/static/sunset.png rename to pkgs/swarmauri_standard/tests/static/sunset.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/sunset.png 9-18-18-261 AM.png b/pkgs/swarmauri_standard/tests/static/sunset.png 9-18-18-261 AM.png similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/sunset.png 9-18-18-261 AM.png rename to pkgs/swarmauri_standard/tests/static/sunset.png 9-18-18-261 AM.png diff --git a/pkgs/standards/swarmauri_standard/tests/static/test.mp3 b/pkgs/swarmauri_standard/tests/static/test.mp3 similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/test.mp3 rename to pkgs/swarmauri_standard/tests/static/test.mp3 diff --git a/pkgs/standards/swarmauri_standard/tests/static/test_fr.mp3 b/pkgs/swarmauri_standard/tests/static/test_fr.mp3 similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/static/test_fr.mp3 rename to pkgs/swarmauri_standard/tests/static/test_fr.mp3 diff --git a/pkgs/swarmauri_standard/tests/static/test_tts.mp3 b/pkgs/swarmauri_standard/tests/static/test_tts.mp3 new file mode 100644 index 000000000..e69de29bb diff --git a/pkgs/standards/swarmauri_standard/tests/unit/agents/QAAgent_unit_test.py b/pkgs/swarmauri_standard/tests/unit/agents/QAAgent_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/agents/QAAgent_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/agents/QAAgent_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/agents/RagAgent_unit_test.py b/pkgs/swarmauri_standard/tests/unit/agents/RagAgent_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/agents/RagAgent_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/agents/RagAgent_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/agents/SimpleConversationAgent_unit_test.py 
b/pkgs/swarmauri_standard/tests/unit/agents/SimpleConversationAgent_unit_test.py similarity index 85% rename from pkgs/standards/swarmauri_standard/tests/unit/agents/SimpleConversationAgent_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/agents/SimpleConversationAgent_unit_test.py index 1a01c31dc..227b16637 100644 --- a/pkgs/standards/swarmauri_standard/tests/unit/agents/SimpleConversationAgent_unit_test.py +++ b/pkgs/swarmauri_standard/tests/unit/agents/SimpleConversationAgent_unit_test.py @@ -1,8 +1,8 @@ import pytest import os -from swarmauri.llms.GroqModel import GroqModel -from swarmauri.conversations.Conversation import Conversation -from swarmauri.agents.SimpleConversationAgent import SimpleConversationAgent +from swarmauri_standard.llms.GroqModel import GroqModel +from swarmauri_standard.conversations.Conversation import Conversation +from swarmauri_standard.agents.SimpleConversationAgent import SimpleConversationAgent from dotenv import load_dotenv load_dotenv() diff --git a/pkgs/standards/swarmauri_standard/tests/unit/agents/ToolAgent_unit_test.py b/pkgs/swarmauri_standard/tests/unit/agents/ToolAgent_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/agents/ToolAgent_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/agents/ToolAgent_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/chains/ChainStep_unit_test.py b/pkgs/swarmauri_standard/tests/unit/chains/ChainStep_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/chains/ChainStep_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/chains/ChainStep_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/chains/ContextChain_unit_test.py b/pkgs/swarmauri_standard/tests/unit/chains/ContextChain_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/chains/ContextChain_unit_test.py rename to 
pkgs/swarmauri_standard/tests/unit/chains/ContextChain_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/chunkers/DelimiterBasedChunker_unit_test.py b/pkgs/swarmauri_standard/tests/unit/chunkers/DelimiterBasedChunker_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/chunkers/DelimiterBasedChunker_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/chunkers/DelimiterBasedChunker_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/chunkers/FixedLengthChunker_unit_test.py b/pkgs/swarmauri_standard/tests/unit/chunkers/FixedLengthChunker_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/chunkers/FixedLengthChunker_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/chunkers/FixedLengthChunker_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/chunkers/MdSnippetChunker_unit_test.py b/pkgs/swarmauri_standard/tests/unit/chunkers/MdSnippetChunker_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/chunkers/MdSnippetChunker_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/chunkers/MdSnippetChunker_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/chunkers/SentenceChunker_unit_test.py b/pkgs/swarmauri_standard/tests/unit/chunkers/SentenceChunker_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/chunkers/SentenceChunker_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/chunkers/SentenceChunker_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/chunkers/SlidingWindowChunker_unit_test.py b/pkgs/swarmauri_standard/tests/unit/chunkers/SlidingWindowChunker_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/chunkers/SlidingWindowChunker_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/chunkers/SlidingWindowChunker_unit_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/control_panels/ControlPanel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/control_panels/ControlPanel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/control_panels/ControlPanel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/control_panels/ControlPanel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/conversations/Conversation_unit_test.py b/pkgs/swarmauri_standard/tests/unit/conversations/Conversation_unit_test.py similarity index 92% rename from pkgs/standards/swarmauri_standard/tests/unit/conversations/Conversation_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/conversations/Conversation_unit_test.py index 9676a638c..f6945898f 100644 --- a/pkgs/standards/swarmauri_standard/tests/unit/conversations/Conversation_unit_test.py +++ b/pkgs/swarmauri_standard/tests/unit/conversations/Conversation_unit_test.py @@ -1,12 +1,13 @@ import pytest from swarmauri_standard.conversations.Conversation import Conversation from swarmauri_standard.messages.HumanMessage import HumanMessage +from swarmauri_core.ComponentBase import ResourceTypes @pytest.mark.unit def test_ubc_resource(): conversation = Conversation() - assert conversation.resource == "Conversation" + assert conversation.resource == ResourceTypes.CONVERSATION @pytest.mark.unit diff --git a/pkgs/standards/swarmauri_standard/tests/unit/conversations/MaxSizeConversation_unit_test.py b/pkgs/swarmauri_standard/tests/unit/conversations/MaxSizeConversation_unit_test.py similarity index 97% rename from pkgs/standards/swarmauri_standard/tests/unit/conversations/MaxSizeConversation_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/conversations/MaxSizeConversation_unit_test.py index cbc724a48..2925b0a43 100644 --- a/pkgs/standards/swarmauri_standard/tests/unit/conversations/MaxSizeConversation_unit_test.py +++ 
b/pkgs/swarmauri_standard/tests/unit/conversations/MaxSizeConversation_unit_test.py @@ -4,12 +4,13 @@ from swarmauri_standard.conversations.MaxSizeConversation import ( MaxSizeConversation, ) +from swarmauri_core.ComponentBase import ResourceTypes @pytest.mark.unit def test_ubc_resource(): conversation = MaxSizeConversation() - assert conversation.resource == "Conversation" + assert conversation.resource == ResourceTypes.CONVERSATION @pytest.mark.unit diff --git a/pkgs/standards/swarmauri_standard/tests/unit/conversations/MaxSystemContextConversation_unit_test.py b/pkgs/swarmauri_standard/tests/unit/conversations/MaxSystemContextConversation_unit_test.py similarity index 96% rename from pkgs/standards/swarmauri_standard/tests/unit/conversations/MaxSystemContextConversation_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/conversations/MaxSystemContextConversation_unit_test.py index a6d77c11a..e461c62fe 100644 --- a/pkgs/standards/swarmauri_standard/tests/unit/conversations/MaxSystemContextConversation_unit_test.py +++ b/pkgs/swarmauri_standard/tests/unit/conversations/MaxSystemContextConversation_unit_test.py @@ -5,12 +5,13 @@ from swarmauri_standard.conversations.MaxSystemContextConversation import ( MaxSystemContextConversation, ) +from swarmauri_core.ComponentBase import ResourceTypes @pytest.mark.unit def test_ubc_resource(): conversation = MaxSystemContextConversation() - assert conversation.resource == "Conversation" + assert conversation.resource == ResourceTypes.CONVERSATION @pytest.mark.unit diff --git a/pkgs/standards/swarmauri_standard/tests/unit/conversations/SessionCacheConversation_unit_test.py b/pkgs/swarmauri_standard/tests/unit/conversations/SessionCacheConversation_unit_test.py similarity index 98% rename from pkgs/standards/swarmauri_standard/tests/unit/conversations/SessionCacheConversation_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/conversations/SessionCacheConversation_unit_test.py index b9830e83b..10e9d190d 100644 --- 
a/pkgs/standards/swarmauri_standard/tests/unit/conversations/SessionCacheConversation_unit_test.py +++ b/pkgs/swarmauri_standard/tests/unit/conversations/SessionCacheConversation_unit_test.py @@ -6,6 +6,7 @@ from swarmauri_standard.conversations.SessionCacheConversation import ( SessionCacheConversation, ) +from swarmauri_core.ComponentBase import ResourceTypes @pytest.mark.unit @@ -13,7 +14,7 @@ def test_ubc_resource(): conversation = SessionCacheConversation( system_context=SystemMessage(content="systest"), max_size=4 ) - assert conversation.resource == "Conversation" + assert conversation.resource == ResourceTypes.CONVERSATION @pytest.mark.unit diff --git a/pkgs/standards/swarmauri_standard/tests/unit/dataconnectors/GoogleDriveDataConnector_unit_test.py b/pkgs/swarmauri_standard/tests/unit/dataconnectors/GoogleDriveDataConnector_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/dataconnectors/GoogleDriveDataConnector_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/dataconnectors/GoogleDriveDataConnector_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/CanberraDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/CanberraDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/CanberraDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/CanberraDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/ChebyShevDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/ChebyShevDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/ChebyShevDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/ChebyShevDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/ChiSquaredDistance_unit_test.py 
b/pkgs/swarmauri_standard/tests/unit/distances/ChiSquaredDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/ChiSquaredDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/ChiSquaredDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/CosineDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/CosineDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/CosineDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/CosineDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/EuclideanDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/EuclideanDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/EuclideanDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/EuclideanDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/HaversineDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/HaversineDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/HaversineDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/HaversineDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/JaccardIndexDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/JaccardIndexDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/JaccardIndexDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/JaccardIndexDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/LevenshteinDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/LevenshteinDistance_unit_test.py 
similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/LevenshteinDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/LevenshteinDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/ManhattanDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/ManhattanDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/ManhattanDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/ManhattanDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/SorensenDiceDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/SorensenDiceDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/SorensenDiceDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/SorensenDiceDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/distances/SquaredEuclideanDistance_unit_test.py b/pkgs/swarmauri_standard/tests/unit/distances/SquaredEuclideanDistance_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/distances/SquaredEuclideanDistance_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/distances/SquaredEuclideanDistance_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/documents/Document_unit_test.py b/pkgs/swarmauri_standard/tests/unit/documents/Document_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/documents/Document_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/documents/Document_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/CohereEmbedding_unit_test.py b/pkgs/swarmauri_standard/tests/unit/embeddings/CohereEmbedding_unit_test.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/tests/unit/embeddings/CohereEmbedding_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/embeddings/CohereEmbedding_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/GeminiEmbedding_unit_test.py b/pkgs/swarmauri_standard/tests/unit/embeddings/GeminiEmbedding_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/embeddings/GeminiEmbedding_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/embeddings/GeminiEmbedding_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/MistralEmbedding_unit_test.py b/pkgs/swarmauri_standard/tests/unit/embeddings/MistralEmbedding_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/embeddings/MistralEmbedding_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/embeddings/MistralEmbedding_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/OpenAIEmbedding_unit_test.py b/pkgs/swarmauri_standard/tests/unit/embeddings/OpenAIEmbedding_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/embeddings/OpenAIEmbedding_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/embeddings/OpenAIEmbedding_unit_test.py diff --git a/pkgs/swarmauri_standard/tests/unit/embeddings/TfidfEmbedding_unit_test.py b/pkgs/swarmauri_standard/tests/unit/embeddings/TfidfEmbedding_unit_test.py new file mode 100644 index 000000000..3d4929211 --- /dev/null +++ b/pkgs/swarmauri_standard/tests/unit/embeddings/TfidfEmbedding_unit_test.py @@ -0,0 +1,44 @@ +import pytest +from swarmauri_standard.embeddings.TfidfEmbedding import TfidfEmbedding + + +@pytest.mark.unit +def test_ubc_resource(): + def test(): + assert TfidfEmbedding().resource == "Embedding" + + test() + + +@pytest.mark.unit +def test_ubc_type(): + assert TfidfEmbedding().type == "TfidfEmbedding" + + +@pytest.mark.unit +def test_serialization(): + 
embedder = TfidfEmbedding() + assert ( + embedder.id == TfidfEmbedding.model_validate_json(embedder.model_dump_json()).id + ) + + +@pytest.mark.unit +def test_fit_transform(): + embedder = TfidfEmbedding() + documents = ["test", "test1", "test2"] + embedder.fit_transform(documents) + assert documents == embedder.extract_features() + + +@pytest.mark.unit +def test_infer_vector(): + embedder = TfidfEmbedding() + documents = ["test", "test1", "test2"] + embedder.fit_transform(documents) + assert embedder.infer_vector("hi", documents).value == [ + 1.3862943611198906, + 0.0, + 0.0, + 0.0, + ] diff --git a/pkgs/standards/swarmauri_standard/tests/unit/embeddings/VoyageEmbedding_unit_test.py b/pkgs/swarmauri_standard/tests/unit/embeddings/VoyageEmbedding_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/embeddings/VoyageEmbedding_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/embeddings/VoyageEmbedding_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/factories/AgentFactory_unit_test.py b/pkgs/swarmauri_standard/tests/unit/factories/AgentFactory_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/factories/AgentFactory_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/factories/AgentFactory_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/factories/Factory_unit_test.py b/pkgs/swarmauri_standard/tests/unit/factories/Factory_unit_test.py similarity index 66% rename from pkgs/standards/swarmauri_standard/tests/unit/factories/Factory_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/factories/Factory_unit_test.py index bc9ee61f2..5f35d12a9 100644 --- a/pkgs/standards/swarmauri_standard/tests/unit/factories/Factory_unit_test.py +++ b/pkgs/swarmauri_standard/tests/unit/factories/Factory_unit_test.py @@ -1,7 +1,7 @@ import pytest from swarmauri_standard.factories.Factory import Factory -from 
swarmauri_standard.parsers.BeautifulSoupElementParser import ( - BeautifulSoupElementParser, +from swarmauri_standard.parsers.HTMLTagStripParser import ( + HTMLTagStripParser, ) @@ -28,16 +28,14 @@ def test_serialization(factory): @pytest.mark.unit def test_factory_register_create_resource(factory): # Register a resource and type - factory.register("Parser", "BeautifulSoupElementParser", BeautifulSoupElementParser) + factory.register("Parser", "HTMLTagStripParser", HTMLTagStripParser) html_content = "

Sample HTML content

" # Create an instance - instance = factory.create( - "Parser", "BeautifulSoupElementParser", element=html_content - ) - assert isinstance(instance, BeautifulSoupElementParser) - assert instance.type == "BeautifulSoupElementParser" + instance = factory.create("Parser", "HTMLTagStripParser", element=html_content) + assert isinstance(instance, HTMLTagStripParser) + assert instance.type == "HTMLTagStripParser" @pytest.mark.unit @@ -46,7 +44,7 @@ def test_factory_create_unregistered_resource(factory): with pytest.raises( ModuleNotFoundError, match="Resource 'UnknownResource' is not registered." ): - factory.create("UnknownResource", "BeautifulSoupElementParser") + factory.create("UnknownResource", "HTMLTagStripParser") @pytest.mark.unit @@ -54,11 +52,9 @@ def test_factory_duplicate_register(factory): # Attempt to register the same type again with pytest.raises( ValueError, - match="Type 'BeautifulSoupElementParser' is already registered under resource 'Parser'.", + match="Type 'HTMLTagStripParser' is already registered under resource 'Parser'.", ): - factory.register( - "Parser", "BeautifulSoupElementParser", BeautifulSoupElementParser - ) + factory.register("Parser", "HTMLTagStripParser", HTMLTagStripParser) @pytest.mark.unit diff --git a/pkgs/standards/swarmauri_standard/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/image_gens/BlackForestImgGenModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/image_gens/DeepInfraImgGenModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/image_gens/DeepInfraImgGenModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/image_gens/DeepInfraImgGenModel_unit_test.py rename to 
pkgs/swarmauri_standard/tests/unit/image_gens/DeepInfraImgGenModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/image_gens/FalAIImgGenModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/image_gens/FalAIImgGenModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/image_gens/FalAIImgGenModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/image_gens/FalAIImgGenModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/image_gens/HyperbolicImgGenModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/image_gens/HyperbolicImgGenModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/image_gens/HyperbolicImgGenModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/image_gens/HyperbolicImgGenModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/image_gens/OpenAIImgGenModel_unit_tesst.py b/pkgs/swarmauri_standard/tests/unit/image_gens/OpenAIImgGenModel_unit_tesst.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/image_gens/OpenAIImgGenModel_unit_tesst.py rename to pkgs/swarmauri_standard/tests/unit/image_gens/OpenAIImgGenModel_unit_tesst.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/AI21StudioModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/AI21StudioModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/AI21StudioModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/AI21StudioModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/AnthropicModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/AnthropicModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/AnthropicModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/AnthropicModel_unit_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/llms/CohereModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/CohereModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/CohereModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/CohereModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/DeepInfraModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/DeepInfraModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/DeepInfraModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/DeepInfraModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/DeepSeekModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/DeepSeekModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/DeepSeekModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/DeepSeekModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/FalAIVisionModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/FalAIVisionModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/FalAIVisionModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/FalAIVisionModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/GeminiProModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/GeminiProModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/GeminiProModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/GeminiProModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/GroqAIAudio_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/GroqAIAudio_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/GroqAIAudio_unit_test.py 
rename to pkgs/swarmauri_standard/tests/unit/llms/GroqAIAudio_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/GroqModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/GroqModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/GroqModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/GroqModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/GroqVisionModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/GroqVisionModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/GroqVisionModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/GroqVisionModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/HyperbolicAudioTTS_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/HyperbolicAudioTTS_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/HyperbolicAudioTTS_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/HyperbolicAudioTTS_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/HyperbolicModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/HyperbolicModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/HyperbolicModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/HyperbolicModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/HyperbolicVisionModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/HyperbolicVisionModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/HyperbolicVisionModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/HyperbolicVisionModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/MistralModel_unit_test.py 
b/pkgs/swarmauri_standard/tests/unit/llms/MistralModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/MistralModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/MistralModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIAudioTTS_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/OpenAIAudioTTS_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIAudioTTS_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/OpenAIAudioTTS_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIAudio_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/OpenAIAudio_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIAudio_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/OpenAIAudio_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/OpenAIModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/OpenAIModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIReasonModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/OpenAIReasonModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/OpenAIReasonModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/OpenAIReasonModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/PerplexityModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/PerplexityModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/PerplexityModel_unit_test.py rename to 
pkgs/swarmauri_standard/tests/unit/llms/PerplexityModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/PlayHTModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/PlayHTModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/PlayHTModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/PlayHTModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/llms/WhisperLargeModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/llms/WhisperLargeModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/llms/WhisperLargeModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/llms/WhisperLargeModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/CompletenessMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/CompletenessMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/CompletenessMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/CompletenessMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/DistinctivenessMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/DistinctivenessMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/DistinctivenessMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/DistinctivenessMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/FirstImpressionMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/FirstImpressionMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/FirstImpressionMeasurement_unit_test.py rename to 
pkgs/swarmauri_standard/tests/unit/measurements/FirstImpressionMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/MeanMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/MeanMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/MeanMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/MeanMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/MiscMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/MiscMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/MiscMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/MiscMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/MissingnessMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/MissingnessMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/MissingnessMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/MissingnessMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/PatternMatchingMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/PatternMatchingMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/PatternMatchingMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/PatternMatchingMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/RatioOfSumsMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/RatioOfSumsMeasurement_unit_test.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/tests/unit/measurements/RatioOfSumsMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/RatioOfSumsMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/StaticMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/StaticMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/StaticMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/StaticMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/UniquenessMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/UniquenessMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/UniquenessMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/UniquenessMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/measurements/ZeroMeasurement_unit_test.py b/pkgs/swarmauri_standard/tests/unit/measurements/ZeroMeasurement_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/measurements/ZeroMeasurement_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/measurements/ZeroMeasurement_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/messages/AgentMessage_unit_test.py b/pkgs/swarmauri_standard/tests/unit/messages/AgentMessage_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/messages/AgentMessage_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/messages/AgentMessage_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/messages/FunctionMessage_unit_test.py b/pkgs/swarmauri_standard/tests/unit/messages/FunctionMessage_unit_test.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/tests/unit/messages/FunctionMessage_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/messages/FunctionMessage_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/messages/HumanMessage_unit_test.py b/pkgs/swarmauri_standard/tests/unit/messages/HumanMessage_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/messages/HumanMessage_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/messages/HumanMessage_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/messages/SystemMessage_unit_test.py b/pkgs/swarmauri_standard/tests/unit/messages/SystemMessage_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/messages/SystemMessage_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/messages/SystemMessage_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/HTMLTagStripParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/HTMLTagStripParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/HTMLTagStripParser_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/parsers/HTMLTagStripParser_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/Md2HtmlParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/Md2HtmlParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/Md2HtmlParser_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/parsers/Md2HtmlParser_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/OpenAPISpecParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/OpenAPISpecParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/OpenAPISpecParser_unit_test.py rename to 
pkgs/swarmauri_standard/tests/unit/parsers/OpenAPISpecParser_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/PhoneNumberExtractorParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/PhoneNumberExtractorParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/PhoneNumberExtractorParser_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/parsers/PhoneNumberExtractorParser_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/PythonParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/PythonParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/PythonParser_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/parsers/PythonParser_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/RegExParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/RegExParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/RegExParser_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/parsers/RegExParser_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/URLExtractorParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/URLExtractorParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/URLExtractorParser_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/parsers/URLExtractorParser_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/parsers/XMLParser_unit_test.py b/pkgs/swarmauri_standard/tests/unit/parsers/XMLParser_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/parsers/XMLParser_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/parsers/XMLParser_unit_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/pipelines/Pipeline_unit_test.py b/pkgs/swarmauri_standard/tests/unit/pipelines/Pipeline_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/pipelines/Pipeline_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/pipelines/Pipeline_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/prompt_templates/PromptTemplate_unit_test.py b/pkgs/swarmauri_standard/tests/unit/prompt_templates/PromptTemplate_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/prompt_templates/PromptTemplate_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/prompt_templates/PromptTemplate_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/prompts/PromptGenerator_unit_test.py b/pkgs/swarmauri_standard/tests/unit/prompts/PromptGenerator_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/prompts/PromptGenerator_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/prompts/PromptGenerator_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/prompts/PromptMatrix_unit_test.py b/pkgs/swarmauri_standard/tests/unit/prompts/PromptMatrix_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/prompts/PromptMatrix_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/prompts/PromptMatrix_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/prompts/Prompt_unit_test.py b/pkgs/swarmauri_standard/tests/unit/prompts/Prompt_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/prompts/Prompt_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/prompts/Prompt_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/schema_converters/AnthropicSchemaConverter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/schema_converters/AnthropicSchemaConverter_unit_test.py 
similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/schema_converters/AnthropicSchemaConverter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/schema_converters/AnthropicSchemaConverter_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/schema_converters/CohereSchemaConverter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/schema_converters/CohereSchemaConverter_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/schema_converters/CohereSchemaConverter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/schema_converters/CohereSchemaConverter_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/schema_converters/GeminiSchemaConverter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/schema_converters/GeminiSchemaConverter_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/schema_converters/GeminiSchemaConverter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/schema_converters/GeminiSchemaConverter_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/schema_converters/GroqSchemaConverter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/schema_converters/GroqSchemaConverter_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/schema_converters/GroqSchemaConverter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/schema_converters/GroqSchemaConverter_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/schema_converters/MistralSchemaConverter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/schema_converters/MistralSchemaConverter_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/schema_converters/MistralSchemaConverter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/schema_converters/MistralSchemaConverter_unit_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/schema_converters/OpenAISchemaConverter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/schema_converters/OpenAISchemaConverter_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/schema_converters/OpenAISchemaConverter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/schema_converters/OpenAISchemaConverter_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/schema_converters/ShuttleAISchemaConverter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/schema_converters/ShuttleAISchemaConverter_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/schema_converters/ShuttleAISchemaConverter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/schema_converters/ShuttleAISchemaConverter_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/service_registries/ServiceRegistry_unit_test.py b/pkgs/swarmauri_standard/tests/unit/service_registries/ServiceRegistry_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/service_registries/ServiceRegistry_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/service_registries/ServiceRegistry_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/state/DictState_unit_test.py b/pkgs/swarmauri_standard/tests/unit/state/DictState_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/state/DictState_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/state/DictState_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/stt/GroqSTT_unit_test.py b/pkgs/swarmauri_standard/tests/unit/stt/GroqSTT_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/stt/GroqSTT_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/stt/GroqSTT_unit_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/stt/OpenaiSTT_unit_test.py b/pkgs/swarmauri_standard/tests/unit/stt/OpenaiSTT_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/stt/OpenaiSTT_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/stt/OpenaiSTT_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/stt/WhisperLargeSTT_unit_test.py b/pkgs/swarmauri_standard/tests/unit/stt/WhisperLargeSTT_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/stt/WhisperLargeSTT_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/stt/WhisperLargeSTT_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/swarms/Swarm_unit_test.py b/pkgs/swarmauri_standard/tests/unit/swarms/Swarm_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/swarms/Swarm_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/swarms/Swarm_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/task_mgmt_strategies/RoundRobinStrategy_unit_test.py b/pkgs/swarmauri_standard/tests/unit/task_mgmt_strategies/RoundRobinStrategy_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/task_mgmt_strategies/RoundRobinStrategy_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/task_mgmt_strategies/RoundRobinStrategy_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tool_llms/AnthropicToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tool_llms/AnthropicToolModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tool_llms/AnthropicToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tool_llms/AnthropicToolModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tool_llms/CohereToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tool_llms/CohereToolModel_unit_test.py similarity index 
100% rename from pkgs/standards/swarmauri_standard/tests/unit/tool_llms/CohereToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tool_llms/CohereToolModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tool_llms/GeminiToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tool_llms/GeminiToolModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tool_llms/GeminiToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tool_llms/GeminiToolModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tool_llms/GroqToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tool_llms/GroqToolModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tool_llms/GroqToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tool_llms/GroqToolModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tool_llms/MistralToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tool_llms/MistralToolModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tool_llms/MistralToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tool_llms/MistralToolModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tool_llms/OpenAIToolModel_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tool_llms/OpenAIToolModel_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tool_llms/OpenAIToolModel_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tool_llms/OpenAIToolModel_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/toolkits/AccessibilityToolkit_unit_test.py b/pkgs/swarmauri_standard/tests/unit/toolkits/AccessibilityToolkit_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/toolkits/AccessibilityToolkit_unit_test.py rename to 
pkgs/swarmauri_standard/tests/unit/toolkits/AccessibilityToolkit_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/toolkits/Toolkit_unit_test.py b/pkgs/swarmauri_standard/tests/unit/toolkits/Toolkit_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/toolkits/Toolkit_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/toolkits/Toolkit_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/AdditionTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/AdditionTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/AdditionTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/AdditionTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/AutomatedReadabilityIndex_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/AutomatedReadabilityIndex_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/AutomatedReadabilityIndex_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/AutomatedReadabilityIndex_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/CalculatorTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/CalculatorTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/CalculatorTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/CalculatorTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/CodeExtractorTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/CodeExtractorTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/CodeExtractorTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/CodeExtractorTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/CodeInterpreterTool_unit_test.py 
b/pkgs/swarmauri_standard/tests/unit/tools/CodeInterpreterTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/CodeInterpreterTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/CodeInterpreterTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/ColemanLiauIndex_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/ColemanLiauIndex_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/ColemanLiauIndex_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/ColemanLiauIndex_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/FleschKincaid_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/FleschKincaid_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/FleschKincaid_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/FleschKincaid_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/FleschReadingEaseTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/FleschReadingEaseTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/FleschReadingEaseTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/FleschReadingEaseTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/GunningFogTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/GunningFogTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/GunningFogTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/GunningFogTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/ImportMemoryModuleTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/ImportMemoryModuleTool_unit_test.py similarity index 100% rename from 
pkgs/standards/swarmauri_standard/tests/unit/tools/ImportMemoryModuleTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/ImportMemoryModuleTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/JSONRequestsTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/JSONRequestsTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/JSONRequestsTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/JSONRequestsTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/Parameter_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/Parameter_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/Parameter_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/Parameter_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/TemperatureConverterTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/TemperatureConverterTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/TemperatureConverterTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/TemperatureConverterTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/TestTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/TestTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/TestTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/TestTool_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tools/WeatherTool_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tools/WeatherTool_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tools/WeatherTool_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tools/WeatherTool_unit_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/transports/PubSubTransport_unit_test.py b/pkgs/swarmauri_standard/tests/unit/transports/PubSubTransport_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/transports/PubSubTransport_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/transports/PubSubTransport_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tts/HyperbolicTTS_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tts/HyperbolicTTS_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tts/HyperbolicTTS_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tts/HyperbolicTTS_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tts/OpenaiTTS_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tts/OpenaiTTS_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tts/OpenaiTTS_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tts/OpenaiTTS_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/tts/PlayhtTTS_unit_test.py b/pkgs/swarmauri_standard/tests/unit/tts/PlayhtTTS_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/tts/PlayhtTTS_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/tts/PlayhtTTS_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/typing/typing_unit_test.py b/pkgs/swarmauri_standard/tests/unit/typing/typing_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/typing/typing_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/typing/typing_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/apply_metaclass_test.py b/pkgs/swarmauri_standard/tests/unit/utils/apply_metaclass_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/apply_metaclass_test.py rename to 
pkgs/swarmauri_standard/tests/unit/utils/apply_metaclass_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/base64_to_file_path_test.py b/pkgs/swarmauri_standard/tests/unit/utils/base64_to_file_path_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/base64_to_file_path_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/base64_to_file_path_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/base64_to_img_url_test.py b/pkgs/swarmauri_standard/tests/unit/utils/base64_to_img_url_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/base64_to_img_url_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/base64_to_img_url_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/base64_to_in_memory_img_test.py b/pkgs/swarmauri_standard/tests/unit/utils/base64_to_in_memory_img_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/base64_to_in_memory_img_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/base64_to_in_memory_img_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/decorate_test.py b/pkgs/swarmauri_standard/tests/unit/utils/decorate_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/decorate_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/decorate_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/file_path_to_base64_test.py b/pkgs/swarmauri_standard/tests/unit/utils/file_path_to_base64_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/file_path_to_base64_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/file_path_to_base64_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/file_path_to_img_url_test.py b/pkgs/swarmauri_standard/tests/unit/utils/file_path_to_img_url_test.py similarity index 100% rename 
from pkgs/standards/swarmauri_standard/tests/unit/utils/file_path_to_img_url_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/file_path_to_img_url_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/file_path_to_in_memory_img_test.py b/pkgs/swarmauri_standard/tests/unit/utils/file_path_to_in_memory_img_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/file_path_to_in_memory_img_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/file_path_to_in_memory_img_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/get_class_hash_test.py b/pkgs/swarmauri_standard/tests/unit/utils/get_class_hash_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/get_class_hash_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/get_class_hash_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/img_url_to_base64_test.py b/pkgs/swarmauri_standard/tests/unit/utils/img_url_to_base64_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/img_url_to_base64_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/img_url_to_base64_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/img_url_to_file_path_test.py b/pkgs/swarmauri_standard/tests/unit/utils/img_url_to_file_path_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/img_url_to_file_path_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/img_url_to_file_path_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/img_url_to_in_memory_img_test.py b/pkgs/swarmauri_standard/tests/unit/utils/img_url_to_in_memory_img_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/img_url_to_in_memory_img_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/img_url_to_in_memory_img_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/utils/in_memory_img_to_base64_test.py b/pkgs/swarmauri_standard/tests/unit/utils/in_memory_img_to_base64_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/in_memory_img_to_base64_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/in_memory_img_to_base64_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/in_memory_img_to_file_path_test.py b/pkgs/swarmauri_standard/tests/unit/utils/in_memory_img_to_file_path_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/in_memory_img_to_file_path_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/in_memory_img_to_file_path_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/in_memory_img_to_img_url_test.py b/pkgs/swarmauri_standard/tests/unit/utils/in_memory_img_to_img_url_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/in_memory_img_to_img_url_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/in_memory_img_to_img_url_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/json_validator_test.py b/pkgs/swarmauri_standard/tests/unit/utils/json_validator_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/json_validator_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/json_validator_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/load_documents_from_json_test.py b/pkgs/swarmauri_standard/tests/unit/utils/load_documents_from_json_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/load_documents_from_json_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/load_documents_from_json_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/memoize_test.py b/pkgs/swarmauri_standard/tests/unit/utils/memoize_test.py similarity index 100% rename 
from pkgs/standards/swarmauri_standard/tests/unit/utils/memoize_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/memoize_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/method_signature_extractor_decorator_test.py b/pkgs/swarmauri_standard/tests/unit/utils/method_signature_extractor_decorator_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/method_signature_extractor_decorator_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/method_signature_extractor_decorator_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/utils/print_notebook_metadata_test.py b/pkgs/swarmauri_standard/tests/unit/utils/print_notebook_metadata_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/utils/print_notebook_metadata_test.py rename to pkgs/swarmauri_standard/tests/unit/utils/print_notebook_metadata_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/vector_stores/TfidfVectorStore_unit_test.py b/pkgs/swarmauri_standard/tests/unit/vector_stores/TfidfVectorStore_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/vector_stores/TfidfVectorStore_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/vector_stores/TfidfVectorStore_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/vectors/Vector_unit_test.py b/pkgs/swarmauri_standard/tests/unit/vectors/Vector_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/vectors/Vector_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/vectors/Vector_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/vlms/FalVLM_unit_test.py b/pkgs/swarmauri_standard/tests/unit/vlms/FalVLM_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/vlms/FalVLM_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/vlms/FalVLM_unit_test.py diff --git 
a/pkgs/standards/swarmauri_standard/tests/unit/vlms/GroqVLM_unit_test.py b/pkgs/swarmauri_standard/tests/unit/vlms/GroqVLM_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/vlms/GroqVLM_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/vlms/GroqVLM_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/unit/vlms/HyperbolicVLM_unit_test.py b/pkgs/swarmauri_standard/tests/unit/vlms/HyperbolicVLM_unit_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/unit/vlms/HyperbolicVLM_unit_test.py rename to pkgs/swarmauri_standard/tests/unit/vlms/HyperbolicVLM_unit_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/AnthropicToolModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/AnthropicToolModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/AnthropicToolModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/AnthropicToolModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/CohereModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/CohereModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/CohereModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/CohereModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/DeepInfraModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/DeepInfraModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/DeepInfraModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/DeepInfraModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/GeminiProModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/GeminiProModel_xfail_test.py similarity index 
100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/GeminiProModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/GeminiProModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/GroqModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/GroqModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/GroqModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/GroqModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/GroqToolModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/GroqToolModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/GroqToolModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/GroqToolModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/MistralModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/MistralModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/MistralModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/MistralModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/MistralToolModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/MistralToolModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/MistralToolModel_xfail_test.py rename to pkgs/swarmauri_standard/tests/xfail/llms/MistralToolModel_xfail_test.py diff --git a/pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/PerplexityModel_xfail_test.py b/pkgs/swarmauri_standard/tests/xfail/llms/PerplexityModel_xfail_test.py similarity index 100% rename from pkgs/standards/swarmauri_standard/tests/expected_to_fail/llms/PerplexityModel_xfail_test.py rename 
to pkgs/swarmauri_standard/tests/xfail/llms/PerplexityModel_xfail_test.py diff --git a/pkgs/tooling/monorepo_manager/README.md b/pkgs/tooling/monorepo_manager/README.md deleted file mode 100644 index 66db27fad..000000000 --- a/pkgs/tooling/monorepo_manager/README.md +++ /dev/null @@ -1,245 +0,0 @@ -

- Swamauri Logo -
- Hits - License - PyPI - monorepo_manager Version - PyPI - monorepo_manager Downloads -
- Python -

- - -# Monorepo Manager - -**Monorepo Manager** is a unified command-line tool for managing a Python monorepo that contains multiple standalone packages—each with its own `pyproject.toml`. It consolidates common tasks such as dependency management, version bumping, remote dependency resolution, test execution and analysis, and project configuration updates into one robust CLI. - -## Features - -- **Dependency Management** - - **Lock:** Generate a `poetry.lock` file. - - **Install:** Install dependencies with options for extras and development dependencies. - - **Show Freeze:** (Available as an internal command) Display installed packages using `pip freeze`. - -- **Build Operations** - - **Build:** Recursively build packages based on local (path) dependencies as specified in your `pyproject.toml` files. - -- **Version Management** - - **Version:** Bump (major, minor, patch, finalize) or explicitly set package versions in `pyproject.toml`. - -- **Remote Operations** - - **Remote Fetch:** Fetch the version from a remote GitHub repository’s `pyproject.toml`. - - **Remote Update:** Update a local `pyproject.toml` file with version information fetched from remote Git dependencies. - -- **Testing and Analysis** - - **Test:** Run your tests using pytest. Optionally, run tests in parallel (supports [pytest‑xdist](https://pypi.org/project/pytest-xdist/)). - - **Analyze:** Analyze test results provided in a JSON file by displaying summary statistics and evaluating threshold conditions for passed/skipped tests. - -- **Pyproject Operations** - - **Pyproject:** Extract both local (path) and Git-based dependencies from a `pyproject.toml` file and, optionally, update dependency versions. 
- -## Installation - -Install via pip: - -```bash -pip install monorepo-manager -``` - -_This command installs the `monorepo-manager` CLI, which is provided via the entry point `monorepo-manager`, into your system PATH._ - -## Usage - -After installation, run the following command to see a list of available commands: - -```bash -monorepo-manager --help -``` - -### Command Examples - -#### 1. Lock Dependencies - -Generate a `poetry.lock` file by specifying either a directory or a file path containing a `pyproject.toml`: - -```bash -# Lock using a directory: -monorepo-manager lock --directory ./packages/package1 - -# Lock using an explicit pyproject.toml file: -monorepo-manager lock --file ./packages/package1/pyproject.toml -``` - -#### 2. Install Dependencies - -Install dependencies with options for extras and including development dependencies: - -```bash -# Basic installation: -monorepo-manager install --directory ./packages/package1 - -# Using an explicit pyproject.toml file: -monorepo-manager install --file ./packages/package1/pyproject.toml - -# Install including development dependencies: -monorepo-manager install --directory ./packages/package1 --dev - -# Install including extras (e.g., extras named "full"): -monorepo-manager install --directory ./packages/package2 --extras full - -# Install including all extras: -monorepo-manager install --directory ./packages/package2 --all-extras -``` - -#### 3. Build Packages - -Recursively build packages based on their local dependency paths defined in their `pyproject.toml` files: - -```bash -# Build packages using a directory containing a master pyproject.toml: -monorepo-manager build --directory . - -# Build packages using an explicit pyproject.toml file: -monorepo-manager build --file ./packages/package1/pyproject.toml -``` - -#### 4. 
Version Management - -Bump or explicitly set the version in a package's `pyproject.toml`: - -```bash -# Bump the patch version (e.g., from 1.2.3.dev1 to 1.2.3.dev2): -monorepo-manager version ./packages/package1/pyproject.toml --bump patch - -# Finalize a development version (remove the .dev suffix): -monorepo-manager version ./packages/package1/pyproject.toml --bump finalize - -# Set an explicit version: -monorepo-manager version ./packages/package1/pyproject.toml --set 2.0.0.dev1 -``` - -#### 5. Remote Operations - -Fetch remote version information and update your local dependency configuration: - -```bash -# Fetch the version from a remote GitHub repository's pyproject.toml: -monorepo-manager remote fetch --git-url https://github.com/YourOrg/YourRepo.git --branch main --subdir "src/" - -# Update a local pyproject.toml with remote-resolved versions: -# (If --output is omitted, the input file is overwritten.) -monorepo-manager remote update --input ./packages/package1/pyproject.toml --output ./packages/package1/pyproject.updated.toml -``` - -#### 6. Testing and Analysis - -Run your tests using pytest and analyze test results from a JSON report: - -- **Run Tests:** - Execute tests within a specified directory. Use the `--num-workers` flag for parallel execution (requires pytest‑xdist): - - ```bash - # Run tests sequentially: - monorepo-manager test --directory ./tests - - # Run tests in parallel using 4 workers: - monorepo-manager test --directory ./tests --num-workers 4 - ``` - -- **Analyze Test Results:** - Analyze a JSON test report and enforce thresholds for passed and skipped tests: - - ```bash - # Analyze test results without thresholds: - monorepo-manager analyze test-results.json - - # Analyze test results with thresholds (e.g., passed tests > 75% and skipped tests < 20%): - monorepo-manager analyze test-results.json --required-passed gt:75 --required-skipped lt:20 - ``` - -#### 7. 
Pyproject Operations - -Extract and update dependency information from a `pyproject.toml` file: - -```bash -# Extract local (path) and Git-based dependencies: -monorepo-manager pyproject --pyproject ./packages/package1/pyproject.toml - -# Update local dependency versions to 2.0.0 (updates the parent file and, if possible, each dependency's own pyproject.toml): -monorepo-manager pyproject --pyproject ./packages/package1/pyproject.toml --update-version 2.0.0 -``` - -## Workflow Example in GitHub Actions - -Here's an example GitHub Actions workflow that uses **monorepo_manager** to lock, build, install, test, bump the patch version, and publish: - -```yaml -name: Release Workflow - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - release: - runs-on: ubuntu-latest - steps: - - name: Checkout Repository - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.9' - - - name: Install monorepo_manager Tools - run: pip install "monorepo_manager@git+https://github.com/swarmauri/monorepo_manager.git@master" - - - name: Lock Dependencies - run: monorepo-manager lock --directory . - - - name: Build Packages - run: monorepo-manager build --directory . - - - name: Install Dependencies - run: monorepo-manager install --directory . - - - name: Run Tests - run: monorepo-manager test --directory ./tests --num-workers 4 - - - name: Bump Patch Version - run: monorepo-manager version ./packages/package1/pyproject.toml --bump patch - - - name: Publish Packages - env: - PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} - PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: monorepo-manager publish --directory . --username "$PYPI_USERNAME" --password "$PYPI_PASSWORD" -``` - -## Development - -### Project Structure - -``` -monorepo_manager/ -├── __init__.py -├── cli.py # Main CLI entry point -├── poetry_ops.py # Poetry operations (lock, install, build, publish, run tests, etc.) 
-├── version_ops.py # Version bumping and setting operations -├── remote_ops.py # Remote Git dependency version fetching/updating -├── test_ops.py # Test result analysis operations -└── pyproject_ops.py # pyproject.toml dependency extraction and updates -pyproject.toml # Package configuration file containing metadata -README.md # This file -``` - -## Contributing - -Contributions are welcome! Please open issues or submit pull requests for improvements or bug fixes. - -## License - -This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details. diff --git a/pkgs/tooling/monorepo_manager/monorepo_manager/cli.py b/pkgs/tooling/monorepo_manager/monorepo_manager/cli.py deleted file mode 100644 index ec94ef48f..000000000 --- a/pkgs/tooling/monorepo_manager/monorepo_manager/cli.py +++ /dev/null @@ -1,265 +0,0 @@ -#!/usr/bin/env python3 -""" -cli.py - -This is the main entry point for the monorepo management CLI. -It provides commands to: - - Manage Poetry-based operations (lock, install, build, show pip-freeze, publish) - - Manage version operations (bump or set versions in pyproject.toml) - - Manage remote operations (fetch/update Git dependency versions) - - Run tests using pytest (with optional parallelism) - - Analyze test results from a JSON file - - Operate on pyproject.toml files (extract and update dependency versions) - -The commands are intentionally named with simple terms (e.g. "lock" instead of "poetry lock", -"install" instead of "poetry install", and "test" instead of "test-analyze"). -""" - -import argparse -import sys - -# Import operations from the local modules. -from monorepo_manager import poetry_ops -from monorepo_manager import version_ops -from monorepo_manager import remote_ops -from monorepo_manager import test_ops -from monorepo_manager import pyproject_ops - - -def main(): - parser = argparse.ArgumentParser( - description="A CLI for managing a Python monorepo with multiple standalone packages." 
- ) - subparsers = parser.add_subparsers( - dest="command", required=True, help="Available commands" - ) - - # ------------------------------------------------ - # Command: lock - # ------------------------------------------------ - lock_parser = subparsers.add_parser("lock", help="Generate a poetry.lock file") - lock_parser.add_argument( - "--directory", type=str, help="Directory containing a pyproject.toml" - ) - lock_parser.add_argument( - "--file", type=str, help="Explicit path to a pyproject.toml file" - ) - - # ------------------------------------------------ - # Command: install - # ------------------------------------------------ - install_parser = subparsers.add_parser("install", help="Install dependencies") - install_parser.add_argument( - "--directory", type=str, help="Directory containing a pyproject.toml" - ) - install_parser.add_argument( - "--file", type=str, help="Explicit path to a pyproject.toml file" - ) - install_parser.add_argument( - "--extras", type=str, help="Extras to include (e.g. 
'full')" - ) - install_parser.add_argument( - "--dev", action="store_true", help="Include dev dependencies" - ) - install_parser.add_argument( - "--all-extras", action="store_true", help="Include all extras" - ) - - # ------------------------------------------------ - # Command: build - # ------------------------------------------------ - build_parser = subparsers.add_parser( - "build", help="Build packages recursively based on path dependencies" - ) - build_parser.add_argument( - "--directory", type=str, help="Directory containing pyproject.toml" - ) - build_parser.add_argument( - "--file", type=str, help="Explicit path to a pyproject.toml file" - ) - - # ------------------------------------------------ - # Command: version - # ------------------------------------------------ - version_parser = subparsers.add_parser( - "version", help="Bump or set package version" - ) - version_parser.add_argument( - "pyproject_file", type=str, help="Path to the pyproject.toml file" - ) - vgroup = version_parser.add_mutually_exclusive_group(required=True) - vgroup.add_argument( - "--bump", - choices=["major", "minor", "patch", "finalize"], - help="Bump the version (e.g. patch, major, minor, finalize)", - ) - vgroup.add_argument( - "--set", dest="set_ver", help="Explicit version to set (e.g. 
2.0.0.dev1)" - ) - - # ------------------------------------------------ - # Command: remote - # ------------------------------------------------ - remote_parser = subparsers.add_parser( - "remote", help="Remote operations for Git dependencies" - ) - remote_subparsers = remote_parser.add_subparsers(dest="remote_cmd", required=True) - - # remote fetch: fetch version from remote GitHub pyproject.toml - fetch_parser = remote_subparsers.add_parser( - "fetch", help="Fetch version from remote GitHub pyproject.toml" - ) - fetch_parser.add_argument( - "--git-url", type=str, required=True, help="GitHub repository URL" - ) - fetch_parser.add_argument( - "--branch", type=str, default="main", help="Branch name (default: main)" - ) - fetch_parser.add_argument( - "--subdir", - type=str, - default="", - help="Subdirectory where pyproject.toml is located", - ) - - # remote update: update a local pyproject.toml with remote resolved versions. - update_parser = remote_subparsers.add_parser( - "update", help="Update local pyproject.toml with remote versions" - ) - update_parser.add_argument( - "--input", required=True, help="Path to the local pyproject.toml" - ) - update_parser.add_argument( - "--output", help="Optional output file path (defaults to overwriting the input)" - ) - - # ------------------------------------------------ - # Command: test (run pytest) - # ------------------------------------------------ - test_parser = subparsers.add_parser("test", help="Run tests using pytest") - test_parser.add_argument( - "--directory", - type=str, - default=".", - help="Directory to run tests in (default: current directory)", - ) - test_parser.add_argument( - "--num-workers", - type=int, - default=1, - help="Number of workers for parallel testing (requires pytest-xdist)", - ) - - # ------------------------------------------------ - # Command: analyze (analyze test results from JSON) - # ------------------------------------------------ - analyze_parser = subparsers.add_parser( - "analyze", 
help="Analyze test results from a JSON file" - ) - analyze_parser.add_argument("file", help="Path to the JSON file with test results") - analyze_parser.add_argument( - "--required-passed", type=str, help="Threshold for passed tests (e.g. 'gt:75')" - ) - analyze_parser.add_argument( - "--required-skipped", - type=str, - help="Threshold for skipped tests (e.g. 'lt:20')", - ) - - # ------------------------------------------------ - # Command: pyproject - # ------------------------------------------------ - pyproject_parser = subparsers.add_parser( - "pyproject", help="Operate on pyproject.toml dependencies" - ) - pyproject_parser.add_argument( - "--pyproject", required=True, help="Path to the pyproject.toml file" - ) - pyproject_parser.add_argument( - "--update-version", - type=str, - help="Update local dependency versions to this version", - ) - - # ------------------------------------------------ - # Dispatch Commands - # ------------------------------------------------ - args = parser.parse_args() - - if args.command == "lock": - poetry_ops.poetry_lock(directory=args.directory, file=args.file) - - elif args.command == "install": - poetry_ops.poetry_install( - directory=args.directory, - file=args.file, - extras=args.extras, - with_dev=args.dev, - all_extras=args.all_extras, - ) - - elif args.command == "build": - poetry_ops.recursive_build(directory=args.directory, file=args.file) - - elif args.command == "version": - version_ops.bump_or_set_version( - args.pyproject_file, bump=args.bump, set_ver=args.set_ver - ) - - elif args.command == "remote": - if args.remote_cmd == "fetch": - ver = remote_ops.fetch_remote_pyproject_version( - git_url=args.git_url, branch=args.branch, subdirectory=args.subdir - ) - if ver: - print(f"Fetched remote version: {ver}") - else: - print("Failed to fetch remote version.") - elif args.remote_cmd == "update": - success = remote_ops.update_and_write_pyproject(args.input, args.output) - if not success: - sys.exit(1) - - elif args.command 
== "test": - # Run pytest (with optional parallelism if --num-workers > 1) - poetry_ops.run_pytests( - test_directory=args.directory, num_workers=args.num_workers - ) - - elif args.command == "analyze": - test_ops.analyze_test_file( - file_path=args.file, - required_passed=args.required_passed, - required_skipped=args.required_skipped, - ) - - elif args.command == "pyproject": - print("Extracting dependencies from pyproject.toml ...") - paths = pyproject_ops.extract_path_dependencies(args.pyproject) - if paths: - print("Local (path) dependencies:") - print(", ".join(paths)) - else: - print("No local path dependencies found.") - - git_deps = pyproject_ops.extract_git_dependencies(args.pyproject) - if git_deps: - print("\nGit dependencies:") - for name, details in git_deps.items(): - print(f"{name}: {details}") - else: - print("No Git dependencies found.") - - if args.update_version: - print(f"\nUpdating local dependency versions to {args.update_version} ...") - pyproject_ops.update_dependency_versions( - args.pyproject, args.update_version - ) - - else: - parser.print_help() - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/pkgs/tooling/monorepo_manager/monorepo_manager/poetry_ops.py b/pkgs/tooling/monorepo_manager/monorepo_manager/poetry_ops.py deleted file mode 100644 index d2667a3ef..000000000 --- a/pkgs/tooling/monorepo_manager/monorepo_manager/poetry_ops.py +++ /dev/null @@ -1,224 +0,0 @@ -#!/usr/bin/env python3 -""" -poetry_ops.py - -Provides functions to: - - Install Poetry - - Run 'poetry lock' and 'poetry install' - - Extract path dependencies from pyproject.toml - - Recursively build packages (based on the dependencies) - - Show the installed packages (via pip freeze) - - Set versions and dependency versions in pyproject.toml files - - Publish built packages to PyPI - - Publish packages based on path dependencies - -Intended for use in a unified monorepo management CLI. 
-""" - -import os -import subprocess -import sys -import toml - - -def run_command(command, cwd=None): - """Run a shell command and handle errors.""" - try: - result = subprocess.run( - command, - cwd=cwd, - text=True, - capture_output=True, - shell=True, - check=True, - ) - if result.stdout: - print(result.stdout) - return result.stdout.strip() - except subprocess.CalledProcessError as e: - print(f"Error running command: {e.stderr}", file=sys.stderr) - sys.exit(e.returncode) - - -def install_poetry(): - """Install Poetry.""" - print("Installing Poetry...") - run_command("curl -sSL https://install.python-poetry.org | python3") - # Update PATH so that ~/.local/bin is included for subsequent commands. - os.environ["PATH"] = ( - f"{os.path.expanduser('~')}/.local/bin:{os.environ.get('PATH', '')}" - ) - - -def poetry_lock(directory=None, file=None): - """ - Run 'poetry lock' in the specified directory or on the specified file's directory. - - :param directory: Directory containing pyproject.toml. - :param file: Path to a specific pyproject.toml file. - """ - location = directory if directory else os.path.dirname(file) - print(f"Generating poetry.lock in {location}...") - run_command("poetry lock", cwd=location) - - -def poetry_install( - directory=None, file=None, extras=None, with_dev=False, all_extras=False -): - """ - Run 'poetry install' in the specified directory or file. - - :param directory: Directory containing pyproject.toml. - :param file: Path to a specific pyproject.toml file. - :param extras: Extras to include (e.g., "full"). - :param with_dev: Boolean flag to include dev dependencies. - :param all_extras: Boolean flag to include all extras. 
- """ - location = directory if directory else os.path.dirname(file) - print(f"Installing dependencies in {location}...") - command = ["poetry", "install", "--no-cache", "-vv"] - if all_extras: - command.append("--all-extras") - elif extras: - command.append(f"--extras {extras}") - if with_dev: - command.append("--with dev") - run_command(" ".join(command), cwd=location) - - -def extract_path_dependencies(pyproject_path): - """ - Extract path dependencies from a pyproject.toml file. - - Looks for dependency entries that are defined as tables with a "path" key. - - :param pyproject_path: Path to the pyproject.toml file. - :return: List of dependency paths found. - """ - print(f"Extracting path dependencies from {pyproject_path}...") - try: - with open(pyproject_path, "r") as f: - data = toml.load(f) - except Exception as e: - print(f"Error reading {pyproject_path}: {e}", file=sys.stderr) - sys.exit(1) - - dependencies = data.get("tool", {}).get("poetry", {}).get("dependencies", {}) - path_deps = [ - v["path"] for v in dependencies.values() if isinstance(v, dict) and "path" in v - ] - return path_deps - - -def recursive_build(directory=None, file=None): - """ - Recursively build packages based on path dependencies extracted from a pyproject.toml. - - :param directory: Directory containing pyproject.toml. - :param file: Specific pyproject.toml file to use. 
- """ - pyproject_path = file if file else os.path.join(directory, "pyproject.toml") - base_dir = os.path.dirname(pyproject_path) - dependencies = extract_path_dependencies(pyproject_path) - - print("Building specified packages...") - for package_path in dependencies: - full_path = os.path.join(base_dir, package_path) - pyproject_file = os.path.join(full_path, "pyproject.toml") - if os.path.isdir(full_path) and os.path.isfile(pyproject_file): - print(f"Building package: {full_path}") - run_command("poetry build", cwd=full_path) - else: - print(f"Skipping {full_path}: not a valid package directory") - - -def show_pip_freeze(): - """ - Show the installed packages using pip freeze. - """ - print("Installed packages (pip freeze):") - run_command("pip freeze") - - -def publish_package(directory=None, file=None, username=None, password=None): - """ - Build and publish packages to PyPI. - - :param directory: Directory containing one or more packages. - :param file: Specific pyproject.toml file to use. - :param username: PyPI username. - :param password: PyPI password. - """ - if directory: - print(f"Publishing all packages in {directory} and its subdirectories...") - for root, dirs, files in os.walk(directory): - if "pyproject.toml" in files: - print(f"Publishing package from {root}...") - run_command("poetry build", cwd=root) - run_command( - f"poetry publish --username {username} --password {password}", - cwd=root, - ) - elif file: - location = os.path.dirname(file) - print(f"Publishing package from {location}...") - run_command("poetry build", cwd=location) - run_command( - f"poetry publish --username {username} --password {password}", - cwd=location, - ) - else: - print("Error: Either a directory or a file must be specified.", file=sys.stderr) - sys.exit(1) - - -def publish_from_dependencies(directory=None, file=None, username=None, password=None): - """ - Build and publish packages based on path dependencies defined in a pyproject.toml. 
- - :param directory: Directory containing the base pyproject.toml. - :param file: Specific pyproject.toml file. - :param username: PyPI username. - :param password: PyPI password. - """ - pyproject_path = file if file else os.path.join(directory, "pyproject.toml") - if not os.path.isfile(pyproject_path): - print(f"pyproject.toml not found at {pyproject_path}", file=sys.stderr) - sys.exit(1) - - base_dir = os.path.dirname(pyproject_path) - dependencies = extract_path_dependencies(pyproject_path) - print("Building and publishing packages based on path dependencies...") - for package_path in dependencies: - full_path = os.path.join(base_dir, package_path) - pyproject_file = os.path.join(full_path, "pyproject.toml") - if os.path.isdir(full_path) and os.path.isfile(pyproject_file): - print(f"Building and publishing package: {full_path}") - run_command("poetry build", cwd=full_path) - run_command( - f"poetry publish --username {username} --password {password}", - cwd=full_path, - ) - else: - print(f"Skipping {full_path}: not a valid package directory") - - -def run_pytests(test_directory=".", num_workers=1): - """ - Run pytest in the specified directory. - - If num_workers is greater than 1, uses pytest‑xdist to run tests in parallel. - - :param test_directory: Directory in which to run tests (default: current directory). - :param num_workers: Number of workers to use (default: 1). Requires pytest-xdist when > 1. 
- """ - command = "pytest" - try: - workers = int(num_workers) - except ValueError: - print("Error: num_workers must be an integer", file=sys.stderr) - sys.exit(1) - if workers > 1: - command += f" -n {workers}" - print(f"Running tests in '{test_directory}' with command: {command}") - run_command(command, cwd=test_directory) diff --git a/pkgs/tooling/monorepo_manager/monorepo_manager/pyproject_ops.py b/pkgs/tooling/monorepo_manager/monorepo_manager/pyproject_ops.py deleted file mode 100644 index c6ff3cfd4..000000000 --- a/pkgs/tooling/monorepo_manager/monorepo_manager/pyproject_ops.py +++ /dev/null @@ -1,197 +0,0 @@ -#!/usr/bin/env python3 -""" -pyproject_ops.py - -Provides functions for operating on pyproject.toml files: - - extract_path_dependencies: Retrieves dependencies that are defined with a "path". - - extract_git_dependencies: Retrieves dependencies that are defined with a "git" key. - - update_dependency_versions: For local dependencies (with a "path"), update their version in the parent - pyproject.toml and optionally in the dependency’s own pyproject.toml. - -These functions can be used independently or integrated into a larger monorepo management tool. -""" - -import os -import sys -import toml - - -def extract_path_dependencies(pyproject_path): - """ - Extract local (path) dependencies from a pyproject.toml file. - - Looks for dependencies in [tool.poetry.dependencies] that are dictionaries containing a "path" key. - - Args: - pyproject_path (str): Path to the pyproject.toml file. - - Returns: - list: A list of path strings extracted from the dependency definitions. 
- """ - try: - with open(pyproject_path, "r") as f: - data = toml.load(f) - except Exception as e: - print(f"Error reading {pyproject_path}: {e}", file=sys.stderr) - sys.exit(1) - - dependencies = data.get("tool", {}).get("poetry", {}).get("dependencies", {}) - path_deps = [ - value["path"] - for value in dependencies.values() - if isinstance(value, dict) and "path" in value - ] - return path_deps - - -def extract_git_dependencies(pyproject_path): - """ - Extract Git-based dependencies from a pyproject.toml file. - - Looks for dependencies in [tool.poetry.dependencies] that are dictionaries containing a "git" key. - - Args: - pyproject_path (str): Path to the pyproject.toml file. - - Returns: - dict: A dictionary mapping dependency names to their details dictionaries. - """ - try: - with open(pyproject_path, "r") as f: - data = toml.load(f) - except Exception as e: - print(f"Error reading {pyproject_path}: {e}", file=sys.stderr) - sys.exit(1) - - dependencies = data.get("tool", {}).get("poetry", {}).get("dependencies", {}) - git_deps = { - name: details - for name, details in dependencies.items() - if isinstance(details, dict) and "git" in details - } - return git_deps - - -def update_dependency_versions(pyproject_path, new_version): - """ - Update versions for local (path) dependencies in a pyproject.toml file. - - For each dependency that is defined as a table with a "path" key: - - The dependency’s version is updated to f"^{new_version}" in the parent pyproject.toml. - - Attempts to update the dependency's own pyproject.toml (if found in the given path) - by setting its version to new_version. - - Args: - pyproject_path (str): Path to the parent pyproject.toml file. - new_version (str): The new version string to set (without the caret). 
- - Returns: - None - """ - try: - with open(pyproject_path, "r") as f: - data = toml.load(f) - except Exception as e: - print(f"Error reading {pyproject_path}: {e}", file=sys.stderr) - sys.exit(1) - - poetry_section = data.get("tool", {}).get("poetry", {}) - dependencies = poetry_section.get("dependencies", {}) - updated_deps = {} - base_dir = os.path.dirname(pyproject_path) - - for dep_name, details in dependencies.items(): - if isinstance(details, dict) and "path" in details: - # Create a new dependency definition with an updated version. - new_dep = {"version": f"^{new_version}"} - # Preserve any additional keys (except we override version). - for key, value in details.items(): - if key != "path": - new_dep[key] = value - updated_deps[dep_name] = new_dep - - # Attempt to update the dependency's own pyproject.toml (if it exists). - dependency_path = os.path.join(base_dir, details["path"]) - dependency_pyproject = os.path.join(dependency_path, "pyproject.toml") - if os.path.isfile(dependency_pyproject): - try: - with open(dependency_pyproject, "r") as dep_file: - dep_data = toml.load(dep_file) - if "tool" in dep_data and "poetry" in dep_data["tool"]: - dep_data["tool"]["poetry"]["version"] = new_version - with open(dependency_pyproject, "w") as dep_file: - toml.dump(dep_data, dep_file) - print( - f"Updated {dependency_pyproject} to version {new_version}" - ) - else: - print( - f"Invalid structure in {dependency_pyproject}", - file=sys.stderr, - ) - except Exception as e: - print( - f"Error updating {dependency_pyproject}: {e}", file=sys.stderr - ) - else: - updated_deps[dep_name] = details - - # Write the updated dependencies back to the parent pyproject.toml. 
- data["tool"]["poetry"]["dependencies"] = updated_deps - try: - with open(pyproject_path, "w") as f: - toml.dump(data, f) - print(f"Updated dependency versions in {pyproject_path}") - except Exception as e: - print(f"Error writing updated file {pyproject_path}: {e}", file=sys.stderr) - sys.exit(1) - - -def main(): - """ - Provides a basic CLI for testing pyproject.toml operations. - - Usage Examples: - - Extract dependencies: - python pyproject_ops.py --pyproject path/to/pyproject.toml - - Update dependency versions (for local path dependencies): - python pyproject_ops.py --pyproject path/to/pyproject.toml --update-version 2.0.0 - """ - import argparse - - parser = argparse.ArgumentParser( - description="Operate on pyproject.toml dependencies" - ) - parser.add_argument( - "--pyproject", - required=True, - help="Path to the pyproject.toml file", - ) - parser.add_argument( - "--update-version", - help="If provided, update local dependencies to this version", - ) - args = parser.parse_args() - - print("Extracting local (path) dependencies:") - paths = extract_path_dependencies(args.pyproject) - if paths: - print(", ".join(paths)) - else: - print("No path dependencies found.") - - print("\nExtracting Git dependencies:") - git_deps = extract_git_dependencies(args.pyproject) - if git_deps: - for name, details in git_deps.items(): - print(f"{name}: {details}") - else: - print("No Git dependencies found.") - - if args.update_version: - print(f"\nUpdating dependency versions to {args.update_version} ...") - update_dependency_versions(args.pyproject, args.update_version) - - -if __name__ == "__main__": - main() diff --git a/pkgs/tooling/monorepo_manager/monorepo_manager/remote_ops.py b/pkgs/tooling/monorepo_manager/monorepo_manager/remote_ops.py deleted file mode 100644 index f0a2fcc3d..000000000 --- a/pkgs/tooling/monorepo_manager/monorepo_manager/remote_ops.py +++ /dev/null @@ -1,197 +0,0 @@ -#!/usr/bin/env python3 -""" -remote_ops.py - -Provides functions to: - - Fetch 
the version from a remote GitHub repository's pyproject.toml. - - Update a local pyproject.toml by resolving Git dependencies: - - For dependencies defined with a 'git' key, - - Replace their version with an inline table containing the fetched version, - - Mark dependencies as optional. - - Write the updated pyproject.toml to a file (or overwrite the input file). - -Intended for use in a unified monorepo management CLI. -""" - -from urllib.parse import urljoin - -import requests -from tomlkit import parse, dumps, inline_table - - -def fetch_remote_pyproject_version(git_url, branch="main", subdirectory=""): - """ - Fetches the version string from a remote pyproject.toml in a GitHub repository. - - Args: - git_url (str): The Git repository URL (must be a GitHub URL). - branch (str): The branch to fetch the file from (default: "main"). - subdirectory (str): The subdirectory in the repo where the pyproject.toml is located (if any). - - Returns: - str or None: The version string if found, otherwise None. - """ - try: - if "github.com" not in git_url: - raise ValueError("Only GitHub repositories are supported by this function.") - - # Remove trailing .git if present. - repo_path = git_url.split("github.com/")[1] - if repo_path.endswith(".git"): - repo_path = repo_path[:-4] - - # Build the raw URL; ensure subdirectory ends with "/" if provided. 
- base_url = f"https://raw.githubusercontent.com/{repo_path}/{branch}/" - if subdirectory and not subdirectory.endswith("/"): - subdirectory += "/" - pyproject_url = urljoin(base_url, f"{subdirectory}pyproject.toml") - - response = requests.get(pyproject_url) - response.raise_for_status() - doc = parse(response.text) - version = doc.get("tool", {}).get("poetry", {}).get("version") - if version is None: - print( - f"Version key not found in remote pyproject.toml from {pyproject_url}" - ) - return version - except Exception as e: - print(f"Error fetching pyproject.toml from {git_url}: {e}") - return None - - -def update_pyproject_with_versions(file_path): - """ - Reads the local pyproject.toml file and updates Git-based dependencies. - - For dependencies defined as a table with a 'git' key, it: - - Fetches the version from the remote repository. - - Creates an inline table for the dependency with the resolved version (prefixed with '^'). - - Marks the dependency as optional. - Also ensures that dependencies referenced in extras are marked as optional. - - Args: - file_path (str): Path to the local pyproject.toml file. - - Returns: - tomlkit.document.Document: The updated TOML document. - If an error occurs, prints the error and returns None. - """ - try: - with open(file_path, "r") as f: - content = f.read() - doc = parse(content) - except Exception as e: - print(f"Error reading {file_path}: {e}") - return None - - try: - tool_section = doc["tool"] - poetry_section = tool_section["poetry"] - except KeyError: - print(f"Error: Invalid pyproject.toml structure in {file_path}.", flush=True) - return None - - dependencies = poetry_section.get("dependencies", {}) - extras = poetry_section.get("extras", {}) - - for dep_name, details in dependencies.items(): - # Process only Git-based dependencies. 
- if isinstance(details, dict) and "git" in details: - git_url = details["git"] - branch = details.get("branch", "main") - subdirectory = details.get("subdirectory", "") - print(f"Updating dependency '{dep_name}':") - print(f" Repository: {git_url}") - print(f" Branch: {branch}") - print(f" Subdirectory: {subdirectory}") - remote_version = fetch_remote_pyproject_version( - git_url, branch=branch, subdirectory=subdirectory - ) - if remote_version: - print(f" Fetched version: {remote_version}") - # Create an inline table with the resolved version and mark as optional. - dep_inline = inline_table() - dep_inline["version"] = f"^{remote_version}" - dep_inline["optional"] = True - dependencies[dep_name] = dep_inline - else: - print( - f" Could not fetch remote version for '{dep_name}'. Marking as optional." - ) - # Mark as optional if version could not be fetched. - details["optional"] = True - dependencies[dep_name] = details - else: - # If the dependency appears in extras but is just a string, convert it to an inline table and mark as optional. - for extra_name, extra_deps in extras.items(): - if dep_name in extra_deps: - if isinstance(details, str): - dep_inline = inline_table() - dep_inline["version"] = details - dep_inline["optional"] = True - dependencies[dep_name] = dep_inline - elif isinstance(details, dict): - details["optional"] = True - break # Only need to update once. - - # Clean the extras section: ensure each extra only contains dependencies that exist. - for extra_name, extra_deps in extras.items(): - extras[extra_name] = [dep for dep in extra_deps if dep in dependencies] - - # Update the document. - poetry_section["dependencies"] = dependencies - poetry_section["extras"] = extras - return doc - - -def update_and_write_pyproject(input_file_path, output_file_path=None): - """ - Updates the specified pyproject.toml file with resolved versions for Git-based dependencies - and writes the updated document to a file. 
- - Args: - input_file_path (str): Path to the original pyproject.toml file. - output_file_path (str, optional): Path to write the updated file. - If not provided, the input file is overwritten. - - Returns: - bool: True if the update and write succeed, False otherwise. - """ - updated_doc = update_pyproject_with_versions(input_file_path) - if updated_doc is None: - print("Failed to update the pyproject.toml document.") - return False - - # Overwrite input file if output file not provided. - output_file_path = output_file_path or input_file_path - - try: - with open(output_file_path, "w") as f: - f.write(dumps(updated_doc)) - print(f"Updated pyproject.toml written to {output_file_path}") - return True - except Exception as e: - print(f"Error writing updated pyproject.toml: {e}") - return False - - -# Example usage when running this module directly. -if __name__ == "__main__": - import argparse - - parser = argparse.ArgumentParser( - description="Update local pyproject.toml with versions fetched from Git dependencies." - ) - parser.add_argument( - "--input", required=True, help="Path to the local pyproject.toml to update" - ) - parser.add_argument( - "--output", - help="Optional output file path (if not specified, overwrites input)", - ) - args = parser.parse_args() - - success = update_and_write_pyproject(args.input, args.output) - if not success: - exit(1) diff --git a/pkgs/tooling/monorepo_manager/monorepo_manager/test_ops.py b/pkgs/tooling/monorepo_manager/monorepo_manager/test_ops.py deleted file mode 100644 index 58be370a7..000000000 --- a/pkgs/tooling/monorepo_manager/monorepo_manager/test_ops.py +++ /dev/null @@ -1,224 +0,0 @@ -#!/usr/bin/env python3 -""" -test_ops.py - -Provides functions to analyze test result data from a JSON file. - -The JSON file is expected to have: - - A "summary" section with keys: "total", "passed", "failed", "skipped". 
- - A "tests" list, where each test contains an "outcome" (e.g., "passed", "failed", "skipped") - and a "keywords" list for tags. - -The module: - - Reads the JSON file. - - Prints a summary table with counts and percentages. - - Groups tests by tags (excluding unwanted tags such as "tests", tags starting with "test_", tags - ending with "_test.py", or empty tags). - - Checks threshold conditions for passed and skipped percentages (if provided) and exits with an error - code if the conditions are not satisfied. -""" - -import json -import sys -import argparse - - -def parse_arguments(args): - """Parse command-line arguments.""" - parser = argparse.ArgumentParser( - description="Analyze test results from a JSON file." - ) - parser.add_argument("file", help="Path to the JSON file containing test results") - parser.add_argument( - "--required-passed", - type=str, - help=( - "Required passed percentage threshold (e.g., 'gt:50', 'lt:30', 'eq:50', " - "'ge:50', 'le:50')" - ), - ) - parser.add_argument( - "--required-skipped", - type=str, - help=( - "Required skipped percentage threshold (e.g., 'gt:20', 'lt:50', 'eq:50', " - "'ge:50', 'le:50')" - ), - ) - return parser.parse_args(args) - - -def evaluate_threshold(value, threshold): - """ - Evaluate if the given value meets the specified threshold condition. - - The threshold format should be: operator:limit (e.g., "gt:50"). - Supported operators: - - gt: greater than - - lt: less than - - eq: equal to - - ge: greater than or equal to - - le: less than or equal to - - Returns: - bool: True if the condition is met, False otherwise. - """ - try: - op, limit = threshold.split(":") - limit = float(limit) - except ValueError as e: - raise ValueError( - f"Invalid threshold format '{threshold}'. Expected format: 'gt:' etc." 
- ) from e - - if op == "gt": - return value > limit - elif op == "lt": - return value < limit - elif op == "eq": - return value == limit - elif op == "ge": - return value >= limit - elif op == "le": - return value <= limit - else: - raise ValueError( - f"Invalid operator '{op}'. Use one of: 'gt', 'lt', 'eq', 'ge', 'le'." - ) - - -def analyze_test_file(file_path, required_passed=None, required_skipped=None): - """ - Analyzes a JSON file with test results. - - The function: - - Prints a summary table with the total count and percentage for each outcome. - - Checks if the percentage of passed or skipped tests meet the specified thresholds. - - Groups tests by tags (excluding tags that are deemed irrelevant). - - Prints detailed tag-based results. - - If thresholds are not met, the function exits with an error. - - Args: - file_path (str): Path to the JSON file. - required_passed (str, optional): Threshold for passed tests (e.g., "gt:50"). - required_skipped (str, optional): Threshold for skipped tests (e.g., "lt:20"). 
- """ - try: - with open(file_path, "r") as f: - data = json.load(f) - except FileNotFoundError: - print(f"Error: File not found: {file_path}") - sys.exit(1) - except json.JSONDecodeError: - print(f"Error: Could not decode JSON from {file_path}") - sys.exit(1) - except Exception as e: - print(f"Unexpected error reading {file_path}: {e}") - sys.exit(1) - - summary = data.get("summary", {}) - tests = data.get("tests", []) - if not summary or not tests: - print("No test data or summary found in the provided file.") - sys.exit(1) - - total_tests = summary.get("total", 0) - print("\nTest Results Summary:") - print(f"{'Category':<15}{'Count':<10}{'Total':<10}{'% of Total':<10}") - print("-" * 50) - for category in ["passed", "skipped", "failed"]: - count = summary.get(category, 0) - percentage = (count / total_tests) * 100 if total_tests > 0 else 0 - print( - f"{category.capitalize():<15}{count:<10}{total_tests:<10}{percentage:<10.2f}" - ) - - # Calculate percentages for threshold evaluation. - passed_pct = ( - (summary.get("passed", 0) / total_tests) * 100 if total_tests > 0 else 0 - ) - skipped_pct = ( - (summary.get("skipped", 0) / total_tests) * 100 if total_tests > 0 else 0 - ) - - threshold_error = False - if required_passed and not evaluate_threshold(passed_pct, required_passed): - print( - f"\nWARNING: Passed percentage ({passed_pct:.2f}%) does not meet the condition '{required_passed}'!" - ) - threshold_error = True - - if required_skipped and not evaluate_threshold(skipped_pct, required_skipped): - print( - f"WARNING: Skipped percentage ({skipped_pct:.2f}%) does not meet the condition '{required_skipped}'!" - ) - threshold_error = True - - # Group tests by tags. - tag_outcomes = {} - for test in tests: - outcome = test.get("outcome", "").lower() - for tag in test.get("keywords", []): - # Exclude unwanted tags. 
- if ( - tag == "tests" - or tag.startswith("test_") - or tag.endswith("_test.py") - or tag.strip() == "" - ): - continue - if tag not in tag_outcomes: - tag_outcomes[tag] = {"passed": 0, "skipped": 0, "failed": 0, "total": 0} - tag_outcomes[tag]["total"] += 1 - if outcome == "passed": - tag_outcomes[tag]["passed"] += 1 - elif outcome == "skipped": - tag_outcomes[tag]["skipped"] += 1 - elif outcome == "failed": - tag_outcomes[tag]["failed"] += 1 - - print("\nTag-Based Results:") - header = f"{'Tag':<30}{'Passed':<10}{'Skipped':<10}{'Failed':<10}{'Total':<10}{'% Passed':<10}{'% Skipped':<10}{'% Failed':<10}" - print(header) - print("-" * len(header)) - # Sort tags by percentage passed descending then alphabetically. - sorted_tags = sorted( - tag_outcomes.items(), - key=lambda item: ( - -( - item[1]["passed"] / item[1]["total"] * 100 - if item[1]["total"] > 0 - else 0 - ), - item[0], - ), - ) - for tag, outcomes in sorted_tags: - total = outcomes["total"] - passed_pct = (outcomes["passed"] / total * 100) if total > 0 else 0 - skipped_pct = (outcomes["skipped"] / total * 100) if total > 0 else 0 - failed_pct = (outcomes["failed"] / total * 100) if total > 0 else 0 - print( - f"{tag:<30}{outcomes['passed']:<10}{outcomes['skipped']:<10}{outcomes['failed']:<10}" - f"{total:<10}{passed_pct:<10.2f}{skipped_pct:<10.2f}{failed_pct:<10.2f}" - ) - - # If thresholds are not met, exit with a non-zero status code. 
- if threshold_error: - sys.exit(1) - else: - print("\nTest analysis completed successfully.") - - -def main(): - args = parse_arguments(sys.argv[1:]) - analyze_test_file( - file_path=args.file, - required_passed=args.required_passed, - required_skipped=args.required_skipped, - ) - - -if __name__ == "__main__": - main() diff --git a/pkgs/tooling/monorepo_manager/monorepo_manager/version_ops.py b/pkgs/tooling/monorepo_manager/monorepo_manager/version_ops.py deleted file mode 100644 index 740ae0a2a..000000000 --- a/pkgs/tooling/monorepo_manager/monorepo_manager/version_ops.py +++ /dev/null @@ -1,224 +0,0 @@ -#!/usr/bin/env python3 -""" -version_ops.py - -Provides functions to: - - Read the current version from pyproject.toml. - - Bump the current version (major, minor, patch) or finalize a dev release. - - Validate that a user-provided new version is not lower than the current one. - - Update the pyproject.toml with the new version. - -Intended for use in a unified monorepo management CLI. -""" - -import sys -from packaging.version import Version, InvalidVersion -from tomlkit import parse, dumps - - -def read_pyproject_version(file_path): - """ - Reads the current version from the provided pyproject.toml file. - - Args: - file_path (str): Path to the pyproject.toml file. - - Returns: - tuple: A tuple containing the current version string and the - tomlkit Document representing the file. - Raises: - KeyError: If the version key is missing. - """ - try: - with open(file_path, "r") as f: - content = f.read() - except Exception as e: - print(f"Error reading {file_path}: {e}", file=sys.stderr) - sys.exit(1) - - doc = parse(content) - try: - version = doc["tool"]["poetry"]["version"] - except KeyError: - raise KeyError( - "No version found under [tool.poetry] in the given pyproject.toml" - ) - return version, doc - - -def bump_version(current_version, bump_type): - """ - Bumps the current version up using semantic versioning. 
- Supports: - - Bumping stable versions (major, minor, patch) which also start a dev cycle. - - Bumping within a dev cycle. - - Finalizing a dev version (removing the .dev suffix). - - Args: - current_version (str): The current version (e.g. "1.0.0" or "1.0.1.dev2"). - bump_type (str): One of "major", "minor", "patch", or "finalize". - - Returns: - str: The new version string. - Raises: - ValueError: If the current version is invalid or the bump operation cannot be performed. - """ - try: - ver = Version(current_version) - except InvalidVersion as e: - raise ValueError(f"Invalid current version '{current_version}': {e}") - - # Check if it's a dev release - is_dev = ver.dev is not None - major, minor, patch = ver.release - - if bump_type == "finalize": - if is_dev: - # Remove the dev segment - new_version = f"{major}.{minor}.{patch}" - else: - raise ValueError("Current version is stable; nothing to finalize.") - elif bump_type == "major": - major += 1 - minor = 0 - patch = 0 - new_version = f"{major}.{minor}.{patch}.dev1" - elif bump_type == "minor": - minor += 1 - patch = 0 - new_version = f"{major}.{minor}.{patch}.dev1" - elif bump_type == "patch": - if is_dev: - # Increment the dev counter if already in a dev cycle. - new_dev = ver.dev + 1 - new_version = f"{major}.{minor}.{patch}.dev{new_dev}" - else: - patch += 1 - new_version = f"{major}.{minor}.{patch}.dev1" - else: - raise ValueError( - "bump_type must be one of: 'major', 'minor', 'patch', or 'finalize'" - ) - - return new_version - - -def validate_and_set_version(current_version, new_version): - """ - Validates that the new version is not lower than the current version. - - Args: - current_version (str): The current version string. - new_version (str): The target version string. - - Returns: - str: The new version if it is valid. - Raises: - ValueError: If new_version is lower than current_version. 
- """ - try: - cur_ver = Version(current_version) - tgt_ver = Version(new_version) - except InvalidVersion as e: - raise ValueError(f"Invalid version provided: {e}") - - if tgt_ver < cur_ver: - raise ValueError( - "You cannot bump the version downwards. The target version must be higher than the current version." - ) - - return new_version - - -def update_pyproject_version(file_path, new_version): - """ - Updates the pyproject.toml file with the new version. - - Args: - file_path (str): The path to the pyproject.toml file. - new_version (str): The new version string. - - Returns: - None - """ - try: - current_version, doc = read_pyproject_version(file_path) - except Exception as e: - print(f"Error: {e}", file=sys.stderr) - sys.exit(1) - - # Update the version field if it exists - if "tool" in doc and "poetry" in doc["tool"]: - doc["tool"]["poetry"]["version"] = new_version - else: - print( - f"Error: Invalid pyproject.toml structure in {file_path}.", file=sys.stderr - ) - sys.exit(1) - - try: - with open(file_path, "w") as f: - f.write(dumps(doc)) - except Exception as e: - print(f"Error writing updated pyproject.toml: {e}", file=sys.stderr) - sys.exit(1) - - print(f"Bumped version from {current_version} to {new_version} in {file_path}.") - - -def bump_or_set_version(pyproject_file, bump=None, set_ver=None): - """ - Executes either a version bump or a direct version set on the given pyproject.toml file. - - Args: - pyproject_file (str): Path to the pyproject.toml file. - bump (str, optional): The type of bump ("major", "minor", "patch", or "finalize"). - set_ver (str, optional): A specific version string to set. 
- - Returns: - None - """ - try: - current_version, _ = read_pyproject_version(pyproject_file) - except Exception as e: - print(f"Error reading current version: {e}", file=sys.stderr) - sys.exit(1) - - try: - if bump: - new_version = bump_version(current_version, bump) - elif set_ver: - new_version = validate_and_set_version(current_version, set_ver) - else: - print("No version operation specified.", file=sys.stderr) - sys.exit(1) - except ValueError as e: - print(f"Error: {e}", file=sys.stderr) - sys.exit(1) - - update_pyproject_version(pyproject_file, new_version) - - -# Example usage when running this module directly. -if __name__ == "__main__": - import argparse - - parser = argparse.ArgumentParser( - description="Bump or set version in pyproject.toml using semantic versioning." - ) - parser.add_argument("file", help="Path to the pyproject.toml file") - - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument( - "--bump", - choices=["major", "minor", "patch", "finalize"], - help="Type of version bump to perform", - ) - group.add_argument( - "--set", - dest="set_ver", - help="Set the version explicitly (e.g. 
1.2.3 or 1.2.3.dev1)", - ) - - args = parser.parse_args() - bump_or_set_version(args.file, bump=args.bump, set_ver=args.set_ver) diff --git a/pkgs/tooling/monorepo_manager/pyproject.toml b/pkgs/tooling/monorepo_manager/pyproject.toml deleted file mode 100644 index 86a085fe9..000000000 --- a/pkgs/tooling/monorepo_manager/pyproject.toml +++ /dev/null @@ -1,21 +0,0 @@ -[tool.poetry] -name = "monorepo-manager" -version = "0.6.1.dev9" -description = "A CLI for managing a Python monorepo" -authors = [ - { name="Your Name", email="you@example.com" } -] - -[tool.poetry.dependencies] -python = ">=3.10,<3.13" -requests = "*" -tomlkit = "*" -packaging = "*" - - -[tool.project.scripts] -monorepo-manager = "monorepo_manager.cli:main" - -[build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta" \ No newline at end of file