diff --git a/.github/workflows/pipeline.yaml b/.github/workflows/pipeline.yaml index 4d06098..288c490 100644 --- a/.github/workflows/pipeline.yaml +++ b/.github/workflows/pipeline.yaml @@ -15,6 +15,8 @@ on: - feat/* - hotfix/* - main + # Allows CodSpeed to trigger backtest performance analysis in order to generate initial data + workflow_dispatch: jobs: init: @@ -52,7 +54,8 @@ jobs: # Run all test suites step: - static - - unit + - unit-simple + - unit-codspeed # Run on all supported Python versions python-version: - "3.11" @@ -103,9 +106,25 @@ jobs: - name: Configure environment variables run: echo "${{ secrets.DOTENV_UNIT_TESTS }}" > .env - - name: Run tests + - name: Run test servers (unit) + if: ${{ contains(matrix.step, 'unit') }} + run: make run-test-servers + + - name: Run tests (simple) + if: ${{ !contains(matrix.step, 'codspeed') }} run: make test-${{ matrix.step }} version_full=${{ needs.init.outputs.VERSION_FULL }} + - name: Run tests (CodSpeed) + if: ${{ contains(matrix.step, 'codspeed') }} + uses: CodSpeedHQ/action@v3.2.0 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: make test-${{ matrix.step }} version_full=${{ needs.init.outputs.VERSION_FULL }} + + - name: Kill test servers (unit) + if: ${{ contains(matrix.step, 'unit') }} + run: make kill-test-servers + - name: Upload artifacts uses: actions/upload-artifact@v4.3.6 if: always() diff --git a/.gitignore b/.gitignore index e60821f..31bdf94 100644 --- a/.gitignore +++ b/.gitignore @@ -291,3 +291,9 @@ test-reports/ # Local .env !.env.example .env.* + +# CodSpeed +.codspeed/ + +# Test servers PIDs +*.pid diff --git a/Makefile b/Makefile index 2077816..fb2839b 100644 --- a/Makefile +++ b/Makefile @@ -53,7 +53,9 @@ upgrade: test: $(MAKE) test-static - $(MAKE) test-unit + $(MAKE) run-test-servers + $(MAKE) test-unit-simple + $(MAKE) kill-test-servers test-static: @echo "➡️ Test dependencies issues (deptry)..." @@ -65,18 +67,35 @@ test-static: @echo "➡️ Test types (Pyright)..." 
uv run pyright -test-unit: - bash cicd/test-unit-ci.sh +run-test-servers: + @echo "➡️ Starting Chromium server..." + python3 -m cicd.run_chromium 1>/dev/null 2>&1 & echo "$$!" > .chromium.pid -test-static-server: @echo "➡️ Starting local static server..." - python3 -m http.server -d ./tests/websites 8000 + python3 -m http.server -d ./tests/websites 8000 1>/dev/null 2>&1 & echo "$$!" > .static_server.pid -test-unit-run: - @echo "➡️ Unit tests (Pytest)..." - uv run pytest \ - --junit-xml=test-reports/$(version_full).xml \ - --log-file=test-reports/$(version_full).log \ +kill-test-servers: + @echo "➡️ Killing Chromium server..." + kill -s SIGKILL $(shell cat .chromium.pid) + + @echo "➡️ Killing local static server..." + kill -s SIGKILL $(shell cat .static_server.pid) + +test-unit-simple: + @echo "➡️ Unit tests with no extra (Pytest)..." + CI=true uv run pytest \ + --junit-xml=test-reports/$(version_full)-simple.xml \ + --log-file=test-reports/$(version_full)-simple.log \ + --maxprocesses=4 \ + -n=logical \ + tests/*.py + +test-unit-codspeed: + @echo "➡️ Unit tests with CodSpeed (Pytest)..." + CI=true uv run pytest \ + --codspeed \ + --junit-xml=test-reports/$(version_full)-codspeed.xml \ + --log-file=test-reports/$(version_full)-codspeed.log \ --maxprocesses=4 \ -n=logical \ tests/*.py diff --git a/README.md b/README.md index 208c2a0..09afff3 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,7 @@ Web scraper made for AI and simplicity in mind. 
It runs as a CLI that can be par [![GitHub project license](https://img.shields.io/github/license/clemlesne/scrape-it-now)](https://github.com/clemlesne/scrape-it-now/blob/main/LICENSE) [![PyPI package version](https://img.shields.io/pypi/v/scrape-it-now)](https://pypi.org/project/scrape-it-now) [![PyPI supported Python versions](https://img.shields.io/pypi/pyversions/scrape-it-now)](https://pypi.org/project/scrape-it-now) +[![CodSpeed report](https://img.shields.io/endpoint?url=https://codspeed.io/badge.json)](https://codspeed.io/clemlesne/scrape-it-now) ## Features diff --git a/cicd/run_chromium.py b/cicd/run_chromium.py new file mode 100644 index 0000000..4c5c360 --- /dev/null +++ b/cicd/run_chromium.py @@ -0,0 +1,31 @@ +import asyncio +from contextlib import suppress +from logging import getLogger + +from playwright.async_api import async_playwright + +CDP_PORT = 9222 + +logger = getLogger(__name__) + + +async def run(): + async with async_playwright() as p: + # Launch Chromium in server mode with CDP enabled + await p.chromium.launch( + args=[ + "--disable-gl-drawing-for-tests", + f"--remote-debugging-port={CDP_PORT}", + ], + channel="chromium", + devtools=True, + ) + logger.info(f"Chromium running with CDP enabled on localhost:{CDP_PORT}") + + # Wait indefinitely (or until the browser is closed) + await asyncio.Future() + + +if __name__ == "__main__": + with suppress(KeyboardInterrupt): + asyncio.run(run()) diff --git a/cicd/test-unit-ci.sh b/cicd/test-unit-ci.sh deleted file mode 100644 index 1f3deaf..0000000 --- a/cicd/test-unit-ci.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -# Start the first command in the background -make test-static-server 1>/dev/null 2>&1 & - -# Capture the PID of the background process -UNIT_RUN_PID=$! - -# Run the second command -make test-unit-run -exit_code=$? 
- -# Once the second command exits, kill the first process -kill $UNIT_RUN_PID - -# Exit with the same exit code as the second command -exit $exit_code diff --git a/pyproject.toml b/pyproject.toml index 9e38b70..62e624b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,6 +72,7 @@ dev = [ "pyinstaller~=6.11", # Create standalone executable "pyright~=1.1", # Static type checker "pytest-asyncio~=0.23", # Pytest plugin for async tests + "pytest-codspeed~=3.2.0", # Pytest plugin for measuring code speed "pytest-repeat~=0.9", # Pytest plugin for repeating tests "pytest-xdist[psutil]~=3.6", # Pytest plugin for parallel testing "pytest~=8.3", # Testing framework diff --git a/src/scrape_it_now/helpers/identity.py b/src/scrape_it_now/helpers/identity.py index b83c9d8..ba77feb 100644 --- a/src/scrape_it_now/helpers/identity.py +++ b/src/scrape_it_now/helpers/identity.py @@ -2,6 +2,7 @@ from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider +from scrape_it_now.helpers import IS_CI from scrape_it_now.helpers.cache import lru_acache from scrape_it_now.helpers.http import azure_transport @@ -9,6 +10,7 @@ @lru_acache() async def credential() -> DefaultAzureCredential: return DefaultAzureCredential( + process_timeout=120 if IS_CI else 10, # 2 mins in CI, 10 secs in production # Performance transport=await azure_transport(), ) diff --git a/src/scrape_it_now/persistence/azure_blob_storage.py b/src/scrape_it_now/persistence/azure_blob_storage.py index 26e8f4d..39c20c9 100644 --- a/src/scrape_it_now/persistence/azure_blob_storage.py +++ b/src/scrape_it_now/persistence/azure_blob_storage.py @@ -182,14 +182,12 @@ async def delete_container( await self._client.delete_container() # Wait for it to be deleted, API is eventually consistent while True: - try: - properties = await self._client.get_container_properties() - if properties.deleted: - break + with suppress(ResourceNotFoundError): + await self._client.get_container_properties() await asyncio.sleep(2) + 
continue # Deleted - except ResourceNotFoundError: - break + break logger.info('Deleted Blob Storage "%s"', self._config.name) async def __aenter__(self) -> "AzureBlobStorage": @@ -207,21 +205,55 @@ async def __aenter__(self) -> "AzureBlobStorage": container=self._config.name, ) - # Create if it does not exist - with suppress(ResourceExistsError): - # Create - await self._client.create_container() - # Wait for it to be created, API is eventually consistent - while True: - with suppress(ResourceNotFoundError): - properties = await self._client.get_container_properties() - if not properties.deleted: - break - await asyncio.sleep(2) - logger.debug('Created Blob Storage "%s"', self._config.name) + await self._wait_for_creation() + await self._wait_for_ready() # Return instance return self + async def _wait_for_ready(self) -> None: + """ + Wait for the container to be ready. + + API is not consistent, so we need to check if the resource is ready to be used. + """ + while True: + # Try using it + try: + # Upload and clean a test blob + await self.upload_blob( + blob="ping", + data=b"ping", + length=4, + overwrite=True, + ) + await self._client.delete_blob("ping") + # If no exception, the container is ready + logger.debug('Blob Storage "%s" is ready', self._config.name) + break + # If exception, the container is not ready yet + except Exception: + logger.debug("Blob Storage not ready yet, retrying", exc_info=True) + await asyncio.sleep(2) + + async def _wait_for_creation(self) -> None: + """ + Wait for the container to be created. + + Loop indefinitely until the container is created. API is not consistent, so we need to check if the resource is created. 
+ """ + # Start creation + with suppress(ResourceExistsError): + await self._client.create_container() + + # Wait for it to be created, API is eventually consistent + while True: + with suppress(ResourceNotFoundError): + await self._client.get_container_properties() + logger.debug('Created Blob Storage "%s"', self._config.name) + # Created + break + await asyncio.sleep(2) + async def __aexit__(self, *exc: Any) -> None: await self._service.close() diff --git a/src/scrape_it_now/persistence/azure_queue_storage.py b/src/scrape_it_now/persistence/azure_queue_storage.py index 76dde1a..ea71111 100644 --- a/src/scrape_it_now/persistence/azure_queue_storage.py +++ b/src/scrape_it_now/persistence/azure_queue_storage.py @@ -120,35 +120,46 @@ async def create_queue( await self._wait_for_ready() async def _wait_for_ready(self) -> None: + """ + Wait for the queue to be ready. + + Loop indefinitely until the queue is respond to send/pull operations. API is not consistent, so we need to check if the resource is ready to be used. + """ while True: + # Try using it try: - # Send a test message + # Send and clean a test message await self.send_message("ping") - # Try to consume the message(s) async for message in self.receive_messages( max_messages=1, visibility_timeout=1 ): await self.delete_message(message) - # If no exception, the queue is created + # If no exception, the queue is ready logger.debug('Queue Storage "%s" is ready', self._config.name) - return - except Exception: # If exception, the queue is not created yet - logger.debug("Queue not created yet, retrying") + break + # If exception, the queue is not ready yet + except Exception: + logger.debug("Queue not ready yet, retrying", exc_info=True) await asyncio.sleep(2) async def _wait_for_creation(self) -> None: - # Create if it does not exist + """ + Wait for the queue to be created. + + Loop indefinitely until the queue is created. API is not consistent, so we need to check if the resource is created. 
+ """ + # Start creation with suppress(ResourceExistsError): - # Create await self._client.create_queue() - # Wait for it to be created, API is eventually consistent - while True: - with suppress(ResourceNotFoundError): - await self._client.get_queue_properties() - logger.debug('Created Queue Storage "%s"', self._config.name) - # Created - return - await asyncio.sleep(2) + + # Wait for it to be created, API is eventually consistent + while True: + with suppress(ResourceNotFoundError): + await self._client.get_queue_properties() + logger.debug('Created Queue Storage "%s"', self._config.name) + # Created + break + await asyncio.sleep(2) @retry( reraise=True, @@ -166,12 +177,12 @@ async def delete_queue( await self._client.delete_queue() # Wait for it to be deleted, API is eventually consistent while True: - try: + with suppress(ResourceNotFoundError): await self._client.get_queue_properties() await asyncio.sleep(2) + continue # Deleted - except ResourceNotFoundError: - break + break logger.info('Deleted Queue Storage "%s"', self._config.name) def _escape(self, value: str) -> str: diff --git a/src/scrape_it_now/persistence/local_disk.py b/src/scrape_it_now/persistence/local_disk.py index 025f91d..a7f63e4 100644 --- a/src/scrape_it_now/persistence/local_disk.py +++ b/src/scrape_it_now/persistence/local_disk.py @@ -15,6 +15,7 @@ from aiofiles.os import makedirs, path, remove, rmdir from pydantic import BaseModel, Field +from scrape_it_now.helpers import IS_CI from scrape_it_now.helpers.logging import logger from scrape_it_now.helpers.resources import file_lock, local_disk_cache_path from scrape_it_now.models.message import Message @@ -254,7 +255,6 @@ async def __aexit__(self, *exc: Any) -> None: class QueueConfig(BaseModel): name: str table: str = "queue" - timeout: int = 30 async def db_path(self) -> str: return await path.abspath( @@ -398,10 +398,7 @@ async def create_queue( await makedirs(dirname(file_path), exist_ok=True) # Initialize the database - async with 
aiosqlite.connect( - database=file_path, - timeout=self._config.timeout, # Wait for 30 secs before giving up - ) as connection: + async with self._use_connection() as connection: # Enable WAL mode to allow multiple readers and one writer await connection.execute( """ @@ -437,7 +434,7 @@ async def _use_connection(self) -> AsyncGenerator[aiosqlite.Connection, None]: # Connect and return the connection async with aiosqlite.connect( database=await self._config.db_path(), - timeout=self._config.timeout, # Wait for 30 secs before giving up + timeout=2 * 60 if IS_CI else 30, # 2 mins in CI, 30 secs in production ) as connection: yield connection diff --git a/src/scrape_it_now/scrape.py b/src/scrape_it_now/scrape.py index fe13e8b..411c43a 100644 --- a/src/scrape_it_now/scrape.py +++ b/src/scrape_it_now/scrape.py @@ -33,6 +33,7 @@ wait_random_exponential, ) +from scrape_it_now.helpers import IS_CI from scrape_it_now.helpers.logging import logger from scrape_it_now.helpers.persistence import blob_client, queue_client from scrape_it_now.helpers.resources import ( @@ -75,8 +76,8 @@ # Bowser BROWSER_TIMEOUT_MS = ( - 5 * 60 * 1000 if IS_CI else 3 * 60 * 1000 -) # 5 mins in CI, 3 secs in production + 10 * 60 * 1000 if IS_CI else 3 * 60 * 1000 +) # 10 mins in CI, 3 mins in production async def _queue( # noqa: PLR0913 @@ -525,7 +526,7 @@ async def _worker( # noqa: PLR0913 logger.debug("Runing %i concurrent tasks per worker", scheduler.limit) # Get a browser instance - browser = await _get_broswer(p.chromium) + browser = await _get_headless_broswer(p.chromium) logger.debug("Started browser %s", browser.browser_type.name) # Process the queue @@ -1005,7 +1006,6 @@ def _network_used_callback(size_bytes: int) -> None: # Convert HTML to Markdown full_markdown = convert_text( format="html", # Input is HTML - sandbox=True, # Enable sandbox mode, we don't know what we are scraping source=full_html_minus_resources, 
to="markdown-fenced_divs-native_divs-raw_html-bracketed_spans-native_spans-link_attributes-header_attributes-inline_code_attributes", verify_format=False, # We know the format, don't verify it @@ -1359,16 +1359,15 @@ async def _install_browser(with_deps: bool) -> None: raise RuntimeError("Failed to install browser") -async def _get_broswer( +async def _get_headless_broswer( browser_type: BrowserType, ) -> Browser: """ - Launch a browser instance. + Launch a headless Chromium instance. """ # Launch the browser browser = await browser_type.launch( channel="chromium", # Explicitly use the new headless mode (see: https://playwright.dev/python/docs/browsers#chromium-new-headless-mode) - chromium_sandbox=True, # Enable the sandbox for security, we don't know what we are scraping timeout=BROWSER_TIMEOUT_MS, args=[ "--disable-gl-drawing-for-tests", # Disable UI rendering, lower CPU usage diff --git a/tests/blob.py b/tests/blob.py index 0aef672..aad0307 100644 --- a/tests/blob.py +++ b/tests/blob.py @@ -18,6 +18,19 @@ ) +@pytest.mark.parametrize( + "provider", + [ + BlobProvider.AZURE_BLOB_STORAGE, + BlobProvider.LOCAL_DISK, + ], + ids=lambda x: x.value, +) +@pytest.mark.benchmark +async def test_acid_single(provider: BlobProvider) -> None: + await _run_acid(provider) + + @pytest.mark.parametrize( "provider", [ @@ -27,7 +40,11 @@ ids=lambda x: x.value, ) @pytest.mark.repeat(10) # Catch multi-threading and concurrency issues -async def test_acid(provider: BlobProvider) -> None: +async def test_acid_repeat(provider: BlobProvider) -> None: + await _run_acid(provider) + + +async def _run_acid(provider: BlobProvider) -> None: # Init values blob_content = _random_content() blob_name = _random_name() diff --git a/tests/conftest.py b/tests/conftest.py index cd8b898..a294c02 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,8 @@ import pytest from playwright.async_api import Browser, async_playwright -from scrape_it_now.scrape import _get_broswer, install +from 
cicd.run_chromium import CDP_PORT +from scrape_it_now.scrape import BROWSER_TIMEOUT_MS, install @pytest.fixture @@ -11,12 +12,12 @@ async def browser() -> AsyncGenerator[Browser, None]: """ Fixture to provide a Playwright browser for each test. """ - # Make sure the browser and pandoc are installed - async with async_playwright() as p: - # Note: This won't install required system packages, make sure to install them manually - await install(False) + # Note: This won't install required system packages, make sure to install them manually + await install(False) - # Restart context to reload PATH to the newly installed binaries async with async_playwright() as p: - async with await _get_broswer(p.chromium) as browser: + async with await p.chromium.connect_over_cdp( + endpoint_url=f"http://localhost:{CDP_PORT}", + timeout=BROWSER_TIMEOUT_MS, + ) as browser: yield browser diff --git a/tests/queue.py b/tests/queue.py index 1165d07..bd10da3 100644 --- a/tests/queue.py +++ b/tests/queue.py @@ -13,6 +13,19 @@ ) +@pytest.mark.parametrize( + "provider", + [ + QueueProvider.AZURE_QUEUE_STORAGE, + QueueProvider.LOCAL_DISK, + ], + ids=lambda x: x.value, +) +@pytest.mark.benchmark +async def test_acid_single(provider: QueueProvider) -> None: + await _run_acid(provider) + + @pytest.mark.parametrize( "provider", [ @@ -22,7 +35,11 @@ ids=lambda x: x.value, ) @pytest.mark.repeat(10) # Catch multi-threading and concurrency issues -async def test_acid(provider: QueueProvider) -> None: +async def test_acid_repeat(provider: QueueProvider) -> None: + await _run_acid(provider) + + +async def _run_acid(provider: QueueProvider) -> None: # Init values queue_name = _random_name() contents = [ diff --git a/tests/scrape.py b/tests/scrape.py index de11ff9..56c6cea 100644 --- a/tests/scrape.py +++ b/tests/scrape.py @@ -128,6 +128,7 @@ async def test_scrape_page_website( await rmdir(website_path) +@pytest.mark.benchmark async def test_scrape_page_links(browser: Browser) -> None: """ Test a page with 
links against the expected links and title. diff --git a/uv.lock b/uv.lock index 30ac726..3db46eb 100644 --- a/uv.lock +++ b/uv.lock @@ -763,6 +763,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/5d/c059c180c84f7962db0aeae7c3b9303ed1d73d76f2bfbc32bc231c8be314/macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c", size = 38094 }, ] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + [[package]] name = "msal" version = "1.31.1" @@ -1483,6 +1504,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1d/0d/95993c08c721ec68892547f2117e8f9dfbcef2ca71e098533541b4a54d5f/pyee-12.0.0-py3-none-any.whl", hash = 
"sha256:7b14b74320600049ccc7d0e0b1becd3b4bd0a03c745758225e31a59f4095c990", size = 14831 }, ] +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, +] + [[package]] name = "pyinstaller" version = "6.11.1" @@ -1587,6 +1617,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/96/31/6607dab48616902f76885dfcf62c08d929796fc3b2d2318faf9fd54dbed9/pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b", size = 18024 }, ] +[[package]] +name = "pytest-codspeed" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, + { name = "pytest" }, + { name = "rich" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/03/98/16fe3895b1b8a6d537a89eecb120b97358df8f0002c6ecd11555d6304dc8/pytest_codspeed-3.2.0.tar.gz", hash = "sha256:f9d1b1a3b2c69cdc0490a1e8b1ced44bffbd0e8e21d81a7160cfdd923f6e8155", size = 18409 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/56/1b65ba0ae1af7fd7ce14a66e7599833efe8bbd0fcecd3614db0017ca224a/pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf6f56067538f4892baa8d7ab5ef4e45bb59033be1ef18759a2c7fc55b32035", size = 26810 }, + { url = 
"https://files.pythonhosted.org/packages/23/e6/d1fafb09a1c4983372f562d9e158735229cb0b11603a61d4fad05463f977/pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39a687b05c3d145642061b45ea78e47e12f13ce510104d1a2cda00eee0e36f58", size = 25442 }, + { url = "https://files.pythonhosted.org/packages/0b/8b/9e95472589d17bb68960f2a09cfa8f02c4d43c82de55b73302bbe0fa4350/pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46a1afaaa1ac4c2ca5b0700d31ac46d80a27612961d031067d73c6ccbd8d3c2b", size = 27182 }, + { url = "https://files.pythonhosted.org/packages/2a/18/82aaed8095e84d829f30dda3ac49fce4e69685d769aae463614a8d864cdd/pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48ce3af3dfa78413ed3d69d1924043aa1519048dbff46edccf8f35a25dab3c2", size = 25933 }, + { url = "https://files.pythonhosted.org/packages/e2/15/60b18d40da66e7aa2ce4c4c66d5a17de20a2ae4a89ac09a58baa7a5bc535/pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66692506d33453df48b36a84703448cb8b22953eea51f03fbb2eb758dc2bdc4f", size = 27180 }, + { url = "https://files.pythonhosted.org/packages/51/bd/6b164d4ae07d8bea5d02ad664a9762bdb63f83c0805a3c8fe7dc6ec38407/pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:479774f80d0bdfafa16112700df4dbd31bf2a6757fac74795fd79c0a7b3c389b", size = 25923 }, + { url = "https://files.pythonhosted.org/packages/f1/9b/952c70bd1fae9baa58077272e7f191f377c86d812263c21b361195e125e6/pytest_codspeed-3.2.0-py3-none-any.whl", hash = "sha256:54b5c2e986d6a28e7b0af11d610ea57bd5531cec8326abe486f1b55b09d91c39", size = 15007 }, +] + [[package]] name = "pytest-repeat" version = "0.9.3" @@ -1745,6 +1795,19 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/88/33/190393a7d36872e237cbc99e6c44d9a078a1ba7b406462fe6eafd5a28e04/requirements_parser-0.11.0-py3-none-any.whl", hash = "sha256:50379eb50311834386c2568263ae5225d7b9d0867fb55cf4ecc93959de2c2684", size = 14800 }, ] +[[package]] +name = "rich" +version = "13.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, +] + [[package]] name = "ruff" version = "0.8.1" @@ -1808,6 +1871,7 @@ dev = [ { name = "pyright" }, { name = "pytest" }, { name = "pytest-asyncio" }, + { name = "pytest-codspeed" }, { name = "pytest-repeat" }, { name = "pytest-xdist", extra = ["psutil"] }, { name = "ruff" }, @@ -1840,6 +1904,7 @@ requires-dist = [ { name = "pyright", marker = "extra == 'dev'", specifier = "~=1.1" }, { name = "pytest", marker = "extra == 'dev'", specifier = "~=8.3" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = "~=0.23" }, + { name = "pytest-codspeed", marker = "extra == 'dev'", specifier = "~=3.2.0" }, { name = "pytest-repeat", marker = "extra == 'dev'", specifier = "~=0.9" }, { name = "pytest-xdist", extras = ["psutil"], marker = "extra == 'dev'", specifier = "~=3.6" }, { name = "python-dotenv", specifier = "~=1.0" },