From 6d23249573e8f9039701035b3a363dc01e26b708 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 08:31:19 -0500 Subject: [PATCH 01/21] Add TMS credential management to dapi Add check_credentials(), establish_credentials(), and revoke_credentials() to manage Tapis Managed Secrets (TMS) SSH keys on TACC execution systems. Includes CredentialError exception, 31 tests, docs, and example notebook. Also fixes missing comma bug in __init__.py __all__ list. --- dapi/__init__.py | 5 +- dapi/client.py | 75 ++++++++++ dapi/exceptions.py | 20 +++ dapi/systems.py | 201 ++++++++++++++++++++++++++- docs/api/exceptions.md | 4 + docs/api/systems.md | 20 ++- docs/authentication.md | 79 +++++++++++ docs/examples/tms_credentials.md | 44 ++++++ docs/index.md | 3 +- docs/quickstart.md | 20 ++- examples/tms_credentials.ipynb | 141 +++++++++++++++++++ mkdocs.yml | 1 + tests/systems/test_credentials.py | 223 ++++++++++++++++++++++++++++++ 13 files changed, 829 insertions(+), 7 deletions(-) create mode 100644 docs/examples/tms_credentials.md create mode 100644 examples/tms_credentials.ipynb create mode 100644 tests/systems/test_credentials.py diff --git a/dapi/__init__.py b/dapi/__init__.py index d71a778..04a51d3 100644 --- a/dapi/__init__.py +++ b/dapi/__init__.py @@ -61,6 +61,7 @@ FileOperationError, AppDiscoveryError, SystemInfoError, + CredentialError, JobSubmissionError, JobMonitorError, ) @@ -94,6 +95,8 @@ "AuthenticationError", "FileOperationError", "AppDiscoveryError", - "SystemInfoError" "JobSubmissionError", + "SystemInfoError", + "CredentialError", + "JobSubmissionError", "JobMonitorError", ] diff --git a/dapi/client.py b/dapi/client.py index f597408..3abe6b9 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -287,6 +287,81 @@ def list_queues(self, system_id: str, verbose: bool = True) -> List[Any]: self._tapis, system_id, verbose=verbose ) + def check_credentials( + self, system_id: str, username: str = None + ) -> bool: + """Check whether TMS credentials exist 
for a user on a system. + + Args: + system_id (str): The ID of the Tapis system (e.g., 'frontera'). + username (str, optional): Username to check. Defaults to + the authenticated user. + + Returns: + bool: True if credentials exist, False otherwise. + + Raises: + CredentialError: If the credential check fails unexpectedly. + ValueError: If system_id is empty. + """ + return systems_module.check_credentials( + self._tapis, system_id, username=username + ) + + def establish_credentials( + self, + system_id: str, + username: str = None, + force: bool = False, + verbose: bool = True, + ) -> None: + """Establish TMS credentials for a user on a Tapis system. + + Idempotent: skips creation if credentials already exist (unless force=True). + Only supported for systems using TMS_KEYS authentication. + + Args: + system_id (str): The ID of the Tapis system (e.g., 'frontera'). + username (str, optional): Username. Defaults to the authenticated user. + force (bool, optional): Re-create even if credentials exist. + Defaults to False. + verbose (bool, optional): Print status messages. Defaults to True. + + Raises: + CredentialError: If the system is not TMS_KEYS or creation fails. + ValueError: If system_id is empty. + """ + return systems_module.establish_credentials( + self._tapis, + system_id, + username=username, + force=force, + verbose=verbose, + ) + + def revoke_credentials( + self, + system_id: str, + username: str = None, + verbose: bool = True, + ) -> None: + """Remove TMS credentials for a user on a Tapis system. + + Idempotent: succeeds silently if credentials do not exist. + + Args: + system_id (str): The ID of the Tapis system (e.g., 'frontera'). + username (str, optional): Username. Defaults to the authenticated user. + verbose (bool, optional): Print status messages. Defaults to True. + + Raises: + CredentialError: If credential removal fails unexpectedly. + ValueError: If system_id is empty. 
+ """ + return systems_module.revoke_credentials( + self._tapis, system_id, username=username, verbose=verbose + ) + class JobMethods: """Interface for Tapis job submission, monitoring, and management. diff --git a/dapi/exceptions.py b/dapi/exceptions.py index a72abf2..fa66368 100644 --- a/dapi/exceptions.py +++ b/dapi/exceptions.py @@ -117,6 +117,26 @@ class SystemInfoError(DapiException): pass +class CredentialError(DapiException): + """Exception raised when credential management operations fail. + + This exception is raised when operations involving Tapis Managed Secrets (TMS) + credentials fail, such as checking, establishing, or revoking user credentials + on a Tapis execution system. + + Args: + message (str): Description of the credential operation failure. + + Example: + >>> try: + ... client.systems.establish_credentials("frontera") + ... except CredentialError as e: + ... print(f"Credential operation failed: {e}") + """ + + pass + + class JobSubmissionError(DapiException): """Exception raised when job submission or validation fails. diff --git a/dapi/systems.py b/dapi/systems.py index c406485..a4cd5c4 100644 --- a/dapi/systems.py +++ b/dapi/systems.py @@ -1,8 +1,8 @@ # dapi/systems.py from tapipy.tapis import Tapis -from tapipy.errors import BaseTapyException +from tapipy.errors import BaseTapyException, UnauthorizedError, NotFoundError from typing import List, Any, Optional -from .exceptions import SystemInfoError +from .exceptions import SystemInfoError, CredentialError def list_system_queues(t: Tapis, system_id: str, verbose: bool = True) -> List[Any]: @@ -95,3 +95,200 @@ def list_system_queues(t: Tapis, system_id: str, verbose: bool = True) -> List[A raise SystemInfoError( f"An unexpected error occurred while fetching queues for system '{system_id}': {e}" ) from e + + +def _resolve_username(t: Tapis, username: Optional[str] = None) -> str: + """Resolve the effective username from an explicit parameter or the Tapis client. 
+ + Args: + t: Authenticated Tapis client instance. + username: Explicit username. If None, falls back to t.username. + + Returns: + The resolved username string. + + Raises: + ValueError: If username cannot be determined from either source. + """ + effective = username or getattr(t, "username", None) + if not effective: + raise ValueError( + "Username must be provided or available on the Tapis client (t.username)." + ) + return effective + + +def check_credentials( + t: Tapis, system_id: str, username: Optional[str] = None +) -> bool: + """Check whether TMS credentials exist for a user on a Tapis system. + + Args: + t: Authenticated Tapis client instance. + system_id: The ID of the Tapis system (e.g., 'frontera', 'stampede3'). + username: The username to check. If None, auto-detected from t.username. + + Returns: + True if credentials exist, False if they do not. + + Raises: + ValueError: If system_id is empty or username cannot be determined. + CredentialError: If an unexpected API error occurs during the check. + """ + if not system_id: + raise ValueError("system_id cannot be empty.") + + effective_username = _resolve_username(t, username) + + try: + t.systems.checkUserCredential( + systemId=system_id, userName=effective_username + ) + return True + except (UnauthorizedError, NotFoundError): + return False + except BaseTapyException as e: + raise CredentialError( + f"Failed to check credentials for user '{effective_username}' " + f"on system '{system_id}': {e}" + ) from e + except Exception as e: + raise CredentialError( + f"Unexpected error checking credentials for user '{effective_username}' " + f"on system '{system_id}': {e}" + ) from e + + +def establish_credentials( + t: Tapis, + system_id: str, + username: Optional[str] = None, + force: bool = False, + verbose: bool = True, +) -> None: + """Establish TMS credentials for a user on a Tapis system. + + Idempotent: if credentials already exist and force is False, no action is taken. 
+ Only systems with defaultAuthnMethod 'TMS_KEYS' are supported. + + Args: + t: Authenticated Tapis client instance. + system_id: The ID of the Tapis system (e.g., 'frontera', 'stampede3'). + username: The username. If None, auto-detected from t.username. + force: If True, create credentials even if they already exist. + verbose: If True, prints status messages. + + Raises: + ValueError: If system_id is empty or username cannot be determined. + CredentialError: If the system does not use TMS_KEYS, if the system is + not found, or if credential creation fails. + """ + if not system_id: + raise ValueError("system_id cannot be empty.") + + effective_username = _resolve_username(t, username) + + # Verify system exists and uses TMS_KEYS authentication + try: + system_details = t.systems.getSystem(systemId=system_id) + authn_method = getattr(system_details, "defaultAuthnMethod", None) + except BaseTapyException as e: + if hasattr(e, "response") and e.response and e.response.status_code == 404: + raise CredentialError( + f"System '{system_id}' not found." + ) from e + raise CredentialError( + f"Failed to retrieve system '{system_id}': {e}" + ) from e + + if authn_method != "TMS_KEYS": + raise CredentialError( + f"System '{system_id}' uses authentication method '{authn_method}', " + f"not 'TMS_KEYS'. TMS credential management is only supported " + f"for TMS_KEYS systems." + ) + + # Check existing credentials unless force is True + if not force: + if check_credentials(t, system_id, effective_username): + if verbose: + print( + f"Credentials already exist for user '{effective_username}' " + f"on system '{system_id}'. No action taken." + ) + return + + # Create credentials + try: + t.systems.createUserCredential( + systemId=system_id, + userName=effective_username, + createTmsKeys=True, + ) + if verbose: + print( + f"TMS credentials established for user '{effective_username}' " + f"on system '{system_id}'." 
+ ) + except BaseTapyException as e: + raise CredentialError( + f"Failed to create credentials for user '{effective_username}' " + f"on system '{system_id}': {e}" + ) from e + except Exception as e: + raise CredentialError( + f"Unexpected error creating credentials for user '{effective_username}' " + f"on system '{system_id}': {e}" + ) from e + + +def revoke_credentials( + t: Tapis, + system_id: str, + username: Optional[str] = None, + verbose: bool = True, +) -> None: + """Remove TMS credentials for a user on a Tapis system. + + Idempotent: if credentials do not exist, no error is raised. + + Args: + t: Authenticated Tapis client instance. + system_id: The ID of the Tapis system (e.g., 'frontera', 'stampede3'). + username: The username. If None, auto-detected from t.username. + verbose: If True, prints status messages. + + Raises: + ValueError: If system_id is empty or username cannot be determined. + CredentialError: If credential removal fails unexpectedly. + """ + if not system_id: + raise ValueError("system_id cannot be empty.") + + effective_username = _resolve_username(t, username) + + try: + t.systems.removeUserCredential( + systemId=system_id, userName=effective_username + ) + if verbose: + print( + f"Credentials revoked for user '{effective_username}' " + f"on system '{system_id}'." + ) + except (UnauthorizedError, NotFoundError): + if verbose: + print( + f"No credentials found for user '{effective_username}' " + f"on system '{system_id}'. No action taken." 
+ ) + except BaseTapyException as e: + raise CredentialError( + f"Failed to revoke credentials for user '{effective_username}' " + f"on system '{system_id}': {e}" + ) from e + except Exception as e: + raise CredentialError( + f"Unexpected error revoking credentials for user '{effective_username}' " + f"on system '{system_id}': {e}" + ) from e diff --git a/docs/api/exceptions.md b/docs/api/exceptions.md index d887bb0..2402c7a 100644 --- a/docs/api/exceptions.md +++ b/docs/api/exceptions.md @@ -22,6 +22,10 @@ Custom exception classes for DAPI error handling and debugging. ::: dapi.exceptions.SystemInfoError +## Credential Management Exceptions + +::: dapi.exceptions.CredentialError + ## Job Management Exceptions ::: dapi.exceptions.JobSubmissionError diff --git a/docs/api/systems.md b/docs/api/systems.md index 902148a..8233026 100644 --- a/docs/api/systems.md +++ b/docs/api/systems.md @@ -1,7 +1,23 @@ # Systems -System information and queue management for DesignSafe execution systems. +System information, queue management, and TMS credential management for DesignSafe execution systems. ## System Queues -::: dapi.systems.list_system_queues \ No newline at end of file +::: dapi.systems.list_system_queues + +## TMS Credential Management + +Manage Tapis Managed Secrets (TMS) credentials on execution systems. TMS credentials are SSH key pairs that allow Tapis to access TACC systems (Frontera, Stampede3, Lonestar6) on behalf of a user. They must be established once per system before submitting jobs. 
+ +### Check Credentials + +::: dapi.systems.check_credentials + +### Establish Credentials + +::: dapi.systems.establish_credentials + +### Revoke Credentials + +::: dapi.systems.revoke_credentials \ No newline at end of file diff --git a/docs/authentication.md b/docs/authentication.md index 9653a7d..9a6ddb2 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -180,6 +180,85 @@ dev_client = DSClient(env_file=".env.development") prod_client = DSClient(env_file=".env.production") ``` +## 🔑 TMS Credentials (Execution System Access) + +After authenticating with DesignSafe, you also need **TMS credentials** on any execution system where you plan to submit jobs. TMS (Trust Management System) manages SSH key pairs that allow Tapis to access TACC systems (Frontera, Stampede3, Lonestar6) on your behalf. + +!!! info "One-time setup" + TMS credentials only need to be established **once per system**. After that, they persist until you revoke them. + +### Establish Credentials + +```python +from dapi import DSClient + +client = DSClient() + +# Establish TMS credentials on execution systems +client.systems.establish_credentials("frontera") +client.systems.establish_credentials("stampede3") +client.systems.establish_credentials("ls6") +``` + +If credentials already exist, `establish_credentials` does nothing (idempotent). To force re-creation: + +```python +client.systems.establish_credentials("frontera", force=True) +``` + +### Check Credentials + +```python +# Check if credentials exist before submitting a job +if client.systems.check_credentials("frontera"): + print("Ready to submit jobs on Frontera") +else: + client.systems.establish_credentials("frontera") +``` + +### Revoke Credentials + +```python +# Remove credentials (e.g., to reset keys) +client.systems.revoke_credentials("frontera") +``` + +### Using TMS from Outside DesignSafe + +TMS credentials work from any environment -- not just DesignSafe JupyterHub. 
As long as you can authenticate with Tapis (e.g., via `.env` file), you can establish and manage TMS credentials from your laptop, CI/CD pipelines, or any Python script: + +```bash +# .env file +DESIGNSAFE_USERNAME=your_username +DESIGNSAFE_PASSWORD=your_password +``` + +```python +from dapi import DSClient + +# Works from anywhere with network access to designsafe.tapis.io +client = DSClient() +client.systems.establish_credentials("frontera") + +# Now submit jobs as usual +job_request = client.jobs.generate_request(...) +job = client.jobs.submit_request(job_request) +``` + +### Troubleshooting TMS + +#### Non-TMS System +``` +CredentialError: System 'my-system' uses authentication method 'PASSWORD', not 'TMS_KEYS'. +``` +**Solution**: TMS credential management only works for systems configured with `TMS_KEYS` authentication. TACC execution systems (frontera, stampede3, ls6) use TMS_KEYS. + +#### System Not Found +``` +CredentialError: System 'nonexistent' not found. +``` +**Solution**: Verify the system ID. Common system IDs: `frontera`, `stampede3`, `ls6`. + ## ✅ Verifying Authentication ### Check Authentication Status diff --git a/docs/examples/tms_credentials.md b/docs/examples/tms_credentials.md new file mode 100644 index 0000000..3661036 --- /dev/null +++ b/docs/examples/tms_credentials.md @@ -0,0 +1,44 @@ +# TMS Credential Management + +This example demonstrates how to manage TMS (Trust Management System) credentials on TACC execution systems using dapi. TMS credentials are SSH key pairs that allow Tapis to access systems like Frontera, Stampede3, and Lonestar6 on your behalf. + +[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/tms_credentials.ipynb) + +## Overview + +Before submitting jobs to TACC execution systems, you need TMS credentials established on each system. 
This is a **one-time setup** per system -- once established, credentials persist until you revoke them. + +This example covers: + +- Checking if credentials exist on a system +- Establishing new credentials (idempotent) +- Force re-creating credentials +- Revoking credentials for cleanup +- Error handling for non-TMS systems + +## Quick Start + +```python +from dapi import DSClient + +ds = DSClient() + +# Check and establish credentials on TACC systems +systems = ["frontera", "stampede3", "ls6"] + +for system_id in systems: + ds.systems.establish_credentials(system_id) +``` + +## API Reference + +| Method | Purpose | +|--------|---------| +| `ds.systems.check_credentials("system_id")` | Returns `True`/`False` | +| `ds.systems.establish_credentials("system_id")` | Creates credentials if missing | +| `ds.systems.establish_credentials("system_id", force=True)` | Re-creates credentials | +| `ds.systems.revoke_credentials("system_id")` | Removes credentials | + +All methods auto-detect your username. Pass `username="other_user"` to override. + +See the [Systems API Reference](../api/systems.md) for full details. diff --git a/docs/index.md b/docs/index.md index 3f23a2e..b6b8450 100644 --- a/docs/index.md +++ b/docs/index.md @@ -28,9 +28,10 @@ Welcome to the **DesignSafe API (dapi)** documentation! 
- **File Management**: Upload, download, and list files on DesignSafe storage - **Path Verification**: Validate that paths exist before using them -### 🔐 Authentication +### 🔐 Authentication & Credentials - **Simplified Auth**: Easy authentication with DesignSafe credentials - **Multiple Methods**: Support for environment variables, .env files, and interactive input +- **TMS Credential Management**: Establish, check, and revoke SSH keys on TACC execution systems - **Secure**: Handles credentials securely with encrypted storage ## 🏃‍♂️ Quick Start diff --git a/docs/quickstart.md b/docs/quickstart.md index a3d4c3c..232f114 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -61,6 +61,21 @@ client = DSClient() # Output: Authentication successful. ``` +### Step 1b: Establish TMS Credentials (One-Time) + +Before submitting jobs, ensure you have TMS credentials on the execution system: + +```python +# One-time setup per system -- safe to call repeatedly +client.systems.establish_credentials("frontera") +# Output: TMS credentials established for user 'myuser' on system 'frontera'. + +# Or if already established: +# Output: Credentials already exist for user 'myuser' on system 'frontera'. No action taken. +``` + +See the [Authentication Guide](authentication.md#tms-credentials-execution-system-access) for details. 
+ ### Step 2: Explore Available Applications ```python @@ -275,7 +290,8 @@ elif final_status == "TIMEOUT": ```python from dapi import ( AuthenticationError, - JobSubmissionError, + CredentialError, + JobSubmissionError, FileOperationError, JobMonitorError ) @@ -290,6 +306,8 @@ try: except AuthenticationError as e: print(f"Authentication failed: {e}") +except CredentialError as e: + print(f"TMS credential error: {e}") except JobSubmissionError as e: print(f"Job submission failed: {e}") except FileOperationError as e: diff --git a/examples/tms_credentials.ipynb b/examples/tms_credentials.ipynb new file mode 100644 index 0000000..8fc01d1 --- /dev/null +++ b/examples/tms_credentials.ipynb @@ -0,0 +1,141 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "9z59vfnpyu4", + "source": "# TMS Credential Management on DesignSafe\n\nBefore submitting jobs to TACC execution systems (Frontera, Stampede3, Lonestar6), you need **TMS credentials** established on each system. TMS (Trust Management System) manages SSH key pairs that allow Tapis to access these systems on your behalf.\n\nThis notebook shows how to:\n1. Authenticate with DesignSafe\n2. Check if TMS credentials exist on a system\n3. Establish new credentials\n4. Revoke credentials (for cleanup/reset)", + "metadata": {} + }, + { + "cell_type": "markdown", + "id": "nu5cikz60mf", + "source": "## Install dapi", + "metadata": {} + }, + { + "cell_type": "code", + "id": "6ffuni7dmhe", + "source": "%pip install dapi --quiet", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "a2owvhvfc1d", + "source": "## 1. Authenticate with DesignSafe\n\nInitialize the `DSClient`. 
This handles authentication via environment variables, `.env` file, or interactive prompts.", + "metadata": {} + }, + { + "cell_type": "code", + "id": "gh08fr4y8sd", + "source": "from dapi import DSClient, CredentialError\n\nds = DSClient()", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "5k0vl6xeoq8", + "source": "## 2. Check existing credentials\n\nCheck whether TMS credentials already exist on a system. Returns `True` or `False`.", + "metadata": {} + }, + { + "cell_type": "code", + "id": "swa2hbzjcs", + "source": "# Check credentials on common TACC systems\nsystems = [\"frontera\", \"stampede3\", \"ls6\"]\n\nfor system_id in systems:\n has_creds = ds.systems.check_credentials(system_id)\n status = \"ready\" if has_creds else \"needs setup\"\n print(f\" {system_id}: {status}\")", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "gb5c0znrbj5", + "source": "## 3. Establish TMS credentials\n\nEstablish credentials on systems where they are missing. This is **idempotent** -- if credentials already exist, it skips creation.", + "metadata": {} + }, + { + "cell_type": "code", + "id": "d9oqpymynys", + "source": "# Establish credentials on all three systems\nfor system_id in systems:\n ds.systems.establish_credentials(system_id)", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "jcrayarevbp", + "source": "### Force re-creation\n\nUse `force=True` to re-create credentials even if they already exist. This is useful if keys are corrupted or you need a fresh pair.", + "metadata": {} + }, + { + "cell_type": "code", + "id": "8xezj3ph9os", + "source": "# Force re-create credentials on a specific system\nds.systems.establish_credentials(\"frontera\", force=True)", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "6ffzxz48gei", + "source": "## 4. 
Verify setup and submit a job\n\nAfter establishing credentials, verify they are in place and submit a test job.", + "metadata": {} + }, + { + "cell_type": "code", + "id": "jrtkc5w6p2", + "source": "# Confirm all systems are ready\nprint(\"Credential status after setup:\")\nfor system_id in systems:\n has_creds = ds.systems.check_credentials(system_id)\n print(f\" {system_id}: {'ready' if has_creds else 'MISSING'}\")", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "hn59uzcepuh", + "source": "## 5. Error handling\n\nThe `CredentialError` exception is raised when credential operations fail (e.g., system not found, non-TMS system).", + "metadata": {} + }, + { + "cell_type": "code", + "id": "1r7517fx2il", + "source": "# Handling errors: non-existent system\ntry:\n ds.systems.establish_credentials(\"nonexistent-system\")\nexcept CredentialError as e:\n print(f\"Expected error: {e}\")", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "e14iwod93y9", + "source": "## 6. Revoke credentials (optional)\n\nRemove TMS credentials from a system. This is useful for cleanup or resetting keys. 
Also idempotent -- succeeds silently if credentials don't exist.", + "metadata": {} + }, + { + "cell_type": "code", + "id": "fhogvmfrwol", + "source": "# Uncomment to revoke credentials on a system\n# ds.systems.revoke_credentials(\"frontera\")", + "metadata": {}, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "id": "nraxef6ruhm", + "source": "## Summary\n\n| Method | Purpose | Idempotent |\n|--------|---------|------------|\n| `ds.systems.check_credentials(\"system_id\")` | Check if credentials exist | N/A (read-only) |\n| `ds.systems.establish_credentials(\"system_id\")` | Create credentials if missing | Yes |\n| `ds.systems.establish_credentials(\"system_id\", force=True)` | Re-create credentials | Yes |\n| `ds.systems.revoke_credentials(\"system_id\")` | Remove credentials | Yes |\n\nAll methods auto-detect your username from the authenticated Tapis client. You can also pass `username=\"other_user\"` explicitly.", + "metadata": {} + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.9.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index f523d71..e156356 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -83,6 +83,7 @@ nav: - MPM Job: examples/mpm.md - OpenSees Job: examples/opensees.md - OpenFOAM Job: examples/openfoam.md + - TMS Credentials: examples/tms_credentials.md - Database Queries: examples/database.md - API Reference: - Overview: api/index.md diff --git a/tests/systems/test_credentials.py b/tests/systems/test_credentials.py new file mode 100644 index 0000000..2f9a99b --- /dev/null +++ b/tests/systems/test_credentials.py @@ -0,0 +1,223 @@ +import unittest +from unittest.mock import Mock, MagicMock + +from tapipy.errors import UnauthorizedError, NotFoundError, BaseTapyException + +from dapi.systems import ( + 
check_credentials, + establish_credentials, + revoke_credentials, + _resolve_username, +) +from dapi.exceptions import CredentialError + + +class TestResolveUsername(unittest.TestCase): + def test_uses_explicit_username(self): + t = MagicMock() + t.username = "tapis_user" + self.assertEqual(_resolve_username(t, "explicit_user"), "explicit_user") + + def test_falls_back_to_tapis_username(self): + t = MagicMock() + t.username = "tapis_user" + self.assertEqual(_resolve_username(t, None), "tapis_user") + + def test_raises_when_no_username_available(self): + t = MagicMock(spec=[]) # no username attr + with self.assertRaises(ValueError): + _resolve_username(t, None) + + def test_raises_when_username_is_empty_string(self): + t = MagicMock() + t.username = "" + with self.assertRaises(ValueError): + _resolve_username(t, "") + + +class TestCheckCredentials(unittest.TestCase): + def setUp(self): + self.t = MagicMock() + self.t.username = "testuser" + + def test_returns_true_when_credentials_exist(self): + self.t.systems.checkUserCredential.return_value = Mock() + self.assertTrue(check_credentials(self.t, "frontera")) + self.t.systems.checkUserCredential.assert_called_once_with( + systemId="frontera", userName="testuser" + ) + + def test_returns_false_on_unauthorized(self): + self.t.systems.checkUserCredential.side_effect = UnauthorizedError() + self.assertFalse(check_credentials(self.t, "frontera")) + + def test_returns_false_on_not_found(self): + self.t.systems.checkUserCredential.side_effect = NotFoundError() + self.assertFalse(check_credentials(self.t, "frontera")) + + def test_uses_explicit_username(self): + self.t.systems.checkUserCredential.return_value = Mock() + check_credentials(self.t, "frontera", username="otheruser") + self.t.systems.checkUserCredential.assert_called_once_with( + systemId="frontera", userName="otheruser" + ) + + def test_raises_value_error_for_empty_system_id(self): + with self.assertRaises(ValueError): + check_credentials(self.t, "") + + def 
test_raises_value_error_when_no_username(self): + self.t.username = None + with self.assertRaises(ValueError): + check_credentials(self.t, "frontera") + + def test_raises_credential_error_on_unexpected_api_error(self): + self.t.systems.checkUserCredential.side_effect = BaseTapyException( + "server error" + ) + with self.assertRaises(CredentialError): + check_credentials(self.t, "frontera") + + def test_raises_credential_error_on_generic_exception(self): + self.t.systems.checkUserCredential.side_effect = RuntimeError("boom") + with self.assertRaises(CredentialError): + check_credentials(self.t, "frontera") + + +class TestEstablishCredentials(unittest.TestCase): + def setUp(self): + self.t = MagicMock() + self.t.username = "testuser" + # Default: system uses TMS_KEYS + self.mock_system = Mock() + self.mock_system.defaultAuthnMethod = "TMS_KEYS" + self.t.systems.getSystem.return_value = self.mock_system + + def test_creates_credentials_when_missing(self): + self.t.systems.checkUserCredential.side_effect = UnauthorizedError() + establish_credentials(self.t, "frontera", verbose=False) + self.t.systems.createUserCredential.assert_called_once_with( + systemId="frontera", userName="testuser", createTmsKeys=True + ) + + def test_skips_when_credentials_exist(self): + self.t.systems.checkUserCredential.return_value = Mock() + establish_credentials(self.t, "frontera", verbose=False) + self.t.systems.createUserCredential.assert_not_called() + + def test_force_creates_even_when_credentials_exist(self): + establish_credentials(self.t, "frontera", force=True, verbose=False) + self.t.systems.createUserCredential.assert_called_once_with( + systemId="frontera", userName="testuser", createTmsKeys=True + ) + # Should NOT call checkUserCredential when force=True + self.t.systems.checkUserCredential.assert_not_called() + + def test_raises_credential_error_for_non_tms_system(self): + self.mock_system.defaultAuthnMethod = "PASSWORD" + with self.assertRaises(CredentialError) as ctx: + 
establish_credentials(self.t, "frontera", verbose=False) + self.assertIn("PASSWORD", str(ctx.exception)) + self.assertIn("TMS_KEYS", str(ctx.exception)) + + def test_raises_credential_error_when_system_not_found(self): + error = BaseTapyException("not found") + mock_response = Mock() + mock_response.status_code = 404 + error.response = mock_response + self.t.systems.getSystem.side_effect = error + with self.assertRaises(CredentialError) as ctx: + establish_credentials(self.t, "nonexistent", verbose=False) + self.assertIn("not found", str(ctx.exception).lower()) + + def test_raises_credential_error_on_get_system_api_error(self): + error = BaseTapyException("server error") + error.response = None + self.t.systems.getSystem.side_effect = error + with self.assertRaises(CredentialError): + establish_credentials(self.t, "frontera", verbose=False) + + def test_raises_value_error_for_empty_system_id(self): + with self.assertRaises(ValueError): + establish_credentials(self.t, "") + + def test_uses_explicit_username(self): + self.t.systems.checkUserCredential.side_effect = UnauthorizedError() + establish_credentials( + self.t, "frontera", username="otheruser", verbose=False + ) + self.t.systems.createUserCredential.assert_called_once_with( + systemId="frontera", userName="otheruser", createTmsKeys=True + ) + + def test_raises_credential_error_on_create_failure(self): + self.t.systems.checkUserCredential.side_effect = UnauthorizedError() + self.t.systems.createUserCredential.side_effect = BaseTapyException( + "create failed" + ) + with self.assertRaises(CredentialError): + establish_credentials(self.t, "frontera", verbose=False) + + def test_verbose_prints_skip_message(self, ): + self.t.systems.checkUserCredential.return_value = Mock() + # Should not raise; just prints a message + establish_credentials(self.t, "frontera", verbose=True) + self.t.systems.createUserCredential.assert_not_called() + + def test_verbose_prints_creation_message(self): + 
self.t.systems.checkUserCredential.side_effect = UnauthorizedError() + establish_credentials(self.t, "frontera", verbose=True) + self.t.systems.createUserCredential.assert_called_once() + + def test_handles_none_authn_method(self): + self.mock_system.defaultAuthnMethod = None + with self.assertRaises(CredentialError) as ctx: + establish_credentials(self.t, "frontera", verbose=False) + self.assertIn("None", str(ctx.exception)) + + +class TestRevokeCredentials(unittest.TestCase): + def setUp(self): + self.t = MagicMock() + self.t.username = "testuser" + + def test_revokes_existing_credentials(self): + revoke_credentials(self.t, "frontera", verbose=False) + self.t.systems.removeUserCredential.assert_called_once_with( + systemId="frontera", userName="testuser" + ) + + def test_idempotent_when_not_found(self): + self.t.systems.removeUserCredential.side_effect = NotFoundError() + # Should not raise + revoke_credentials(self.t, "frontera", verbose=False) + + def test_idempotent_when_unauthorized(self): + self.t.systems.removeUserCredential.side_effect = UnauthorizedError() + revoke_credentials(self.t, "frontera", verbose=False) + + def test_raises_credential_error_on_api_error(self): + self.t.systems.removeUserCredential.side_effect = BaseTapyException( + "server error" + ) + with self.assertRaises(CredentialError): + revoke_credentials(self.t, "frontera", verbose=False) + + def test_raises_value_error_for_empty_system_id(self): + with self.assertRaises(ValueError): + revoke_credentials(self.t, "") + + def test_uses_explicit_username(self): + revoke_credentials(self.t, "frontera", username="otheruser", verbose=False) + self.t.systems.removeUserCredential.assert_called_once_with( + systemId="frontera", userName="otheruser" + ) + + def test_raises_credential_error_on_generic_exception(self): + self.t.systems.removeUserCredential.side_effect = RuntimeError("boom") + with self.assertRaises(CredentialError): + revoke_credentials(self.t, "frontera", verbose=False) + + +if 
__name__ == "__main__": + unittest.main() From 6611d928b9bfae63d56e117e67ad39731f1aa2c1 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 09:06:25 -0500 Subject: [PATCH 02/21] Switch from mkdocs to Jupyter Book v2 Remove mkdocs.yml, add myst.yml for Jupyter Book v2 / MyST-MD. Update docs.yml workflow to use jupyter-book>=2 with Node.js. Add jupyter-book ^2.0.0 as docs dependency in pyproject.toml. Add TMS credentials example to _toc.yml. --- .github/workflows/docs.yml | 11 +- _toc.yml | 1 + mkdocs.yml | 104 ---- myst.yml | 57 ++ poetry.lock | 1165 ++++++++++++++++++++++++------------ pyproject.toml | 3 + 6 files changed, 858 insertions(+), 483 deletions(-) delete mode 100644 mkdocs.yml create mode 100644 myst.yml diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 7d107fd..470891b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,19 +26,24 @@ jobs: with: python-version: "3.12" + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "22" + - name: Install dependencies run: | - pip install "jupyter-book<1" + pip install "jupyter-book>=2" pip install . - name: Build the book run: | - jupyter-book build . 
--path-output out + jupyter-book build --html - name: Upload artifact uses: actions/upload-pages-artifact@v3 with: - path: ./out/_build/html + path: ./_build/html deploy: environment: diff --git a/_toc.yml b/_toc.yml index b8f1e78..4bb1c17 100644 --- a/_toc.yml +++ b/_toc.yml @@ -20,6 +20,7 @@ parts: - file: docs/examples/mpm - file: docs/examples/opensees - file: docs/examples/openfoam + - file: docs/examples/tms_credentials - file: docs/examples/database - caption: API Reference chapters: diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index e156356..0000000 --- a/mkdocs.yml +++ /dev/null @@ -1,104 +0,0 @@ -site_name: DAPI - DesignSafe API -site_description: Documentation for the DesignSafe API (dapi) package -site_author: Krishna Kumar, Pedro Arduino, Scott Brandenberg -site_url: https://designsafe-ci.github.io/dapi -repo_url: https://github.com/DesignSafe-CI/dapi -repo_name: DesignSafe-CI/dapi - -theme: - name: material - logo: nheri.png - favicon: favicon.ico - palette: - # Palette toggle for light mode - - scheme: default - primary: blue - accent: blue - toggle: - icon: material/brightness-7 - name: Switch to dark mode - # Palette toggle for dark mode - - scheme: slate - primary: blue - accent: blue - toggle: - icon: material/brightness-4 - name: Switch to light mode - features: - - navigation.tabs - - navigation.sections - - navigation.expand - - navigation.path - - navigation.top - - search.highlight - - search.share - - content.code.copy - - content.tabs.link - -plugins: - - search - - autorefs - - mkdocstrings: - handlers: - python: - options: - docstring_style: google - show_source: true - show_if_no_docstring: true - inherited_members: true - members_order: source - separate_signature: true - unwrap_annotated: true - filters: ["!^_"] - merge_init_into_class: true - docstring_section_style: spacy - -markdown_extensions: - - pymdownx.highlight: - anchor_linenums: true - - pymdownx.inlinehilite - - pymdownx.snippets - - pymdownx.superfences - 
- pymdownx.tabbed: - alternate_style: true - - admonition - - pymdownx.details - - pymdownx.superfences - - attr_list - - md_in_html - -nav: - - Home: - - Overview: index.md - - Getting Started: - - Installation: installation.md - - Authentication: authentication.md - - Quick Start: quickstart.md - - User Guide: - - Jobs: jobs.md - - Database Access: database.md - - Examples: - - Overview: examples.md - - App Discovery: examples/apps.md - - MPM Job: examples/mpm.md - - OpenSees Job: examples/opensees.md - - OpenFOAM Job: examples/openfoam.md - - TMS Credentials: examples/tms_credentials.md - - Database Queries: examples/database.md - - API Reference: - - Overview: api/index.md - - DSClient: api/client.md - - Jobs: api/jobs.md - - Files: api/files.md - - Apps: api/apps.md - - Systems: api/systems.md - - Database: api/database.md - - Auth: api/auth.md - - Exceptions: api/exceptions.md - -extra: - social: - - icon: fontawesome/brands/github - link: https://github.com/DesignSafe-CI/dapi - css: - - stylesheets/extra.css \ No newline at end of file diff --git a/myst.yml b/myst.yml new file mode 100644 index 0000000..c3bfecb --- /dev/null +++ b/myst.yml @@ -0,0 +1,57 @@ +version: 1 +project: + title: DAPI - DesignSafe API + authors: + - name: Krishna Kumar + email: krishnak@utexas.edu + - name: Pedro Arduino + email: parduino@uw.edu + - name: Scott Brandenberg + email: sjbrandenberg@ucla.edu + copyright: "2024" + github: https://github.com/DesignSafe-CI/dapi + license: MIT + toc: + - file: docs/index.md + - title: Getting Started + children: + - file: docs/installation.md + - file: docs/authentication.md + - file: docs/quickstart.md + - title: User Guide + children: + - file: docs/jobs.md + - file: docs/database.md + - title: Examples + children: + - file: docs/examples.md + - file: docs/examples/apps.md + - file: docs/examples/mpm.md + - file: docs/examples/opensees.md + - file: docs/examples/openfoam.md + - file: docs/examples/tms_credentials.md + - file: 
docs/examples/database.md + - title: API Reference + children: + - file: docs/api/index.md + - file: docs/api/client.md + - file: docs/api/jobs.md + - file: docs/api/files.md + - file: docs/api/apps.md + - file: docs/api/systems.md + - file: docs/api/database.md + - file: docs/api/auth.md + - file: docs/api/exceptions.md + exclude: + - _build/** + - out/** + - "**.ipynb_checkpoints" + - examples/** + - README.md + - AUTHORS.md + - LICENSE.md +site: + template: book-theme + options: + logo: docs/nheri.png + favicon: docs/favicon.ico diff --git a/poetry.lock b/poetry.lock index 482e748..153ce7f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,25 +1,159 @@ # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +[[package]] +name = "anyio" +version = "4.12.1" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c"}, + {file = "anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] + [[package]] name = "appnope" version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["dev", "docs"] markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = 
"sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, ] +[[package]] +name = "argon2-cffi" +version = "25.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741"}, + {file = "argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +groups = ["docs"] +markers = "python_version >= \"3.14\"" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "argon2-cffi-bindings" +version = "25.1.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +markers = "python_version < \"3.14\"" +files = [ + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:3d3f05610594151994ca9ccb3c771115bdb4daef161976a266f0dd8aa9996b8f"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8b8efee945193e667a396cbc7b4fb7d357297d6234d30a489905d96caabde56b"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3c6702abc36bf3ccba3f802b799505def420a1b7039862014a65db3205967f5a"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1c70058c6ab1e352304ac7e3b52554daadacd8d453c1752e547c76e9c99ac44"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2fd3bfbff3c5d74fef31a722f729bf93500910db650c925c2d6ef879a7e51cb"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4f9665de60b1b0e99bcd6be4f17d90339698ce954cfd8d9cf4f91c995165a92"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:ba92837e4a9aa6a508c8d2d7883ed5a8f6c308c89a4790e1e447a220deb79a85"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win32.whl", hash = "sha256:84a461d4d84ae1295871329b346a97f68eade8c53b6ed9a7ca2d7467f3c8ff6f"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b55aec3565b65f56455eebc9b9f34130440404f27fe21c3b375bf1ea4d8fbae6"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:87c33a52407e4c41f3b70a9c2d3f6056d88b10dad7695be708c5021673f55623"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98"}, + {file = 
"argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520"}, + {file = "argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d"}, +] + +[package.dependencies] +cffi = {version = ">=1.0.1", markers = "python_version < \"3.14\""} + +[[package]] +name = "arrow" +version = "1.4.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205"}, + {file = "arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +tzdata = {version = "*", markers = "python_version >= \"3.9\""} + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", 
"pytz (==2025.2)", "simplejson (==3.*)"] + [[package]] name = "asttokens" version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -46,7 +180,7 @@ version = "25.3.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, @@ -61,38 +195,27 @@ tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothe tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] -name = "babel" -version = "2.17.0" -description = "Internationalization utilities" +name = "beautifulsoup4" +version = "4.14.3" +description = "Screen-scraping library" optional = false -python-versions = ">=3.8" -groups = ["main"] +python-versions = ">=3.7.0" +groups = ["docs"] files = [ - {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, - {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, + {file = "beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb"}, + {file = "beautifulsoup4-4.14.3.tar.gz", hash = 
"sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86"}, ] -[package.extras] -dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] - -[[package]] -name = "backrefs" -version = "5.8" -description = "A wrapper around re and regex that adds additional back references." -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "backrefs-5.8-py310-none-any.whl", hash = "sha256:c67f6638a34a5b8730812f5101376f9d41dc38c43f1fdc35cb54700f6ed4465d"}, - {file = "backrefs-5.8-py311-none-any.whl", hash = "sha256:2e1c15e4af0e12e45c8701bd5da0902d326b2e200cafcd25e49d9f06d44bb61b"}, - {file = "backrefs-5.8-py312-none-any.whl", hash = "sha256:bbef7169a33811080d67cdf1538c8289f76f0942ff971222a16034da88a73486"}, - {file = "backrefs-5.8-py313-none-any.whl", hash = "sha256:e3a63b073867dbefd0536425f43db618578528e3896fb77be7141328642a1585"}, - {file = "backrefs-5.8-py39-none-any.whl", hash = "sha256:a66851e4533fb5b371aa0628e1fee1af05135616b86140c9d787a2ffdf4b8fdc"}, - {file = "backrefs-5.8.tar.gz", hash = "sha256:2cab642a205ce966af3dd4b38ee36009b31fa9502a35fd61d59ccc116e40a6bd"}, -] +[package.dependencies] +soupsieve = ">=1.6.1" +typing-extensions = ">=4.0.0" [package.extras] -extras = ["regex"] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] [[package]] name = "black" @@ -143,6 +266,46 @@ d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \" jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "bleach" +version = "6.2.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.9" +groups = ["docs"] +markers = "python_version == \"3.9\"" +files = [ + {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, + {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, +] + +[package.dependencies] +tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.5)"] + +[[package]] +name = "bleach" +version = "6.3.0" +description = "An easy safelist-based HTML-sanitizing tool." +optional = false +python-versions = ">=3.10" +groups = ["docs"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6"}, + {file = "bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22"}, +] + +[package.dependencies] +tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.5)"] + [[package]] name = "certifi" version = "2025.4.26" @@ -161,7 +324,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main", "dev", "docs"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -344,7 +507,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["dev"] markers = "python_version == \"3.9\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, @@ -360,7 +523,7 @@ version = "8.2.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" -groups = ["main", "dev"] +groups = ["dev"] markers = "python_version >= \"3.10\"" files = [ {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, @@ -388,12 +551,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev"] +groups = ["main", "dev", "docs"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\" or platform_system == \"Windows\"", docs = "sys_platform == \"win32\""} [[package]] name = "comm" @@ -401,7 +564,7 @@ version = "0.2.2" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -531,7 +694,7 @@ version = "1.8.14" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339"}, {file = "debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79"}, @@ -567,19 +730,31 @@ version = "5.2.1" description = "Decorators for Humans" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = 
"sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, ] +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["docs"] +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + [[package]] name = "exceptiongroup" version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main", "dev", "docs"] markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, @@ -598,7 +773,7 @@ version = "2.2.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, @@ -608,22 +783,31 @@ files = [ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] -name = "ghp-import" -version = "2.1.0" -description = "Copy your docs directly to the gh-pages branch." 
+name = "fastjsonschema" +version = "2.21.2" +description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" -groups = ["main"] +groups = ["docs"] files = [ - {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, - {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, + {file = "fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463"}, + {file = "fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de"}, ] -[package.dependencies] -python-dateutil = ">=2.8.1" - [package.extras] -dev = ["flake8", "markdown", "twine", "wheel"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +groups = ["docs"] +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] [[package]] name = "greenlet" @@ -694,28 +878,13 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] -[[package]] -name = "griffe" -version = "1.7.3" -description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "griffe-1.7.3-py3-none-any.whl", hash = "sha256:c6b3ee30c2f0f17f30bcdef5068d6ab7a2a4f1b8bf1a3e74b56fffd21e1c5f75"}, - {file = "griffe-1.7.3.tar.gz", hash = "sha256:52ee893c6a3a968b639ace8015bec9d36594961e156e23315c8e8e51401fa50b"}, -] - -[package.dependencies] -colorama = ">=0.4" - [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -730,7 +899,7 @@ version = "8.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["dev", "docs"] markers = "python_version == \"3.9\"" files = [ {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, @@ -767,7 +936,7 @@ version = "6.29.5" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -801,7 +970,7 @@ version = "8.18.1" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" -groups = ["dev"] +groups = ["dev", "docs"] markers = "python_version == \"3.9\"" files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, @@ -840,7 +1009,7 @@ version = 
"8.37.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" -groups = ["dev"] +groups = ["dev", "docs"] markers = "python_version == \"3.10\"" files = [ {file = "ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2"}, @@ -880,7 +1049,7 @@ version = "9.3.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.11" -groups = ["dev"] +groups = ["dev", "docs"] markers = "python_version >= \"3.11\"" files = [ {file = "ipython-9.3.0-py3-none-any.whl", hash = "sha256:1a0b6dd9221a1f5dddf725b57ac0cb6fddc7b5f470576231ae9162b9b3455a04"}, @@ -914,7 +1083,7 @@ version = "1.1.1" description = "Defines a variety of Pygments lexers for highlighting IPython code." optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] markers = "python_version >= \"3.11\"" files = [ {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, @@ -936,13 +1105,28 @@ files = [ {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, ] +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +groups = ["docs"] +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] + +[package.dependencies] +arrow = ">=0.15.0" + [[package]] name = "jedi" version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." 
optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -962,7 +1146,7 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["docs"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -974,13 +1158,25 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +groups = ["docs"] +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + [[package]] name = "jsonschema" version = "4.24.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"}, {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"}, @@ -988,9 +1184,17 @@ files = [ [package.dependencies] attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = 
"extra == \"format-nongpl\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -1020,7 +1224,7 @@ version = "2025.4.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"}, {file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"}, @@ -1029,13 +1233,35 @@ files = [ [package.dependencies] referencing = ">=0.31.0" +[[package]] +name = "jupyter-book" +version = "2.1.2" +description = "Create computational narratives that are reusable, reproducible, and interactive." 
+optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "jupyter_book-2.1.2-py3-none-any.whl", hash = "sha256:1e92850680782ca777452780f9dee0550b52af338e6fe8c115961142ebf30a0e"}, + {file = "jupyter_book-2.1.2.tar.gz", hash = "sha256:94bb8e63ef191e88cb6e7ab8a1ac66c7dc354ac228de1f558e5beb0381d38f8a"}, +] + +[package.dependencies] +ipykernel = "*" +jupyter-core = "*" +jupyter-server = "*" +nodeenv = ">=1.9.1" +platformdirs = ">=4.2.2" + +[package.extras] +docs = ["markdown", "pandas"] + [[package]] name = "jupyter-client" version = "8.6.3" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, @@ -1059,7 +1285,7 @@ version = "5.8.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0"}, {file = "jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941"}, @@ -1074,6 +1300,102 @@ traitlets = ">=5.3" docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest (<9)", "pytest-cov", "pytest-timeout"] +[[package]] +name = "jupyter-events" +version = "0.12.0" +description = "Jupyter Event System library" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, + {file = "jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, +] + +[package.dependencies] +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +packaging = "*" +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +referencing = "*" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8)", "sphinxcontrib-spelling"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] + +[[package]] +name = "jupyter-server" +version = "2.17.0" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f"}, + {file = "jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5"}, +] + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = ">=21.1" +jinja2 = ">=3.0.3" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.11.0" +jupyter-server-terminals = ">=0.4.4" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +overrides = {version = ">=5.0", markers = "python_version < \"3.12\""} +packaging = ">=22.0" +prometheus-client = ">=0.9" +pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = ">=1.8.2" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = ">=1.7" + +[package.extras] +docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.5.4" +description = "A Jupyter Server Extension Providing Terminals." 
+optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "jupyter_server_terminals-0.5.4-py3-none-any.whl", hash = "sha256:55be353fc74a80bc7f3b20e6be50a55a61cd525626f578dcb66a5708e2007d14"}, + {file = "jupyter_server_terminals-0.5.4.tar.gz", hash = "sha256:bbda128ed41d0be9020349f9f1f2a4ab9952a73ed5f5ac9f1419794761fb87f5"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +description = "Pygments theme using JupyterLab CSS variables" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, +] + [[package]] name = "lazy-object-proxy" version = "1.11.0" @@ -1098,32 +1420,13 @@ files = [ {file = "lazy_object_proxy-1.11.0.tar.gz", hash = "sha256:18874411864c9fbbbaa47f9fc1dd7aea754c86cfde21278ef427639d1dd78e9c"}, ] -[[package]] -name = "markdown" -version = "3.8" -description = "Python implementation of John Gruber's Markdown." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "markdown-3.8-py3-none-any.whl", hash = "sha256:794a929b79c5af141ef5ab0f2f642d0f7b1872981250230e72682346f7cc90dc"}, - {file = "markdown-3.8.tar.gz", hash = "sha256:7df81e63f0df5c4b24b7d156eb81e4690595239b7d70937d0409f1b0de319c6f"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] -testing = ["coverage", "pyyaml"] - [[package]] name = "markupsafe" version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1194,7 +1497,7 @@ version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -1204,206 +1507,176 @@ files = [ traitlets = "*" [[package]] -name = "mergedeep" -version = "1.3.4" -description = "A deep merge function for 🐍." 
-optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, - {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, -] - -[[package]] -name = "mkdocs" -version = "1.6.1" -description = "Project documentation with Markdown." +name = "mistune" +version = "3.2.0" +description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["docs"] files = [ - {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, - {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, + {file = "mistune-3.2.0-py3-none-any.whl", hash = "sha256:febdc629a3c78616b94393c6580551e0e34cc289987ec6c35ed3f4be42d0eee1"}, + {file = "mistune-3.2.0.tar.gz", hash = "sha256:708487c8a8cdd99c9d90eb3ed4c3ed961246ff78ac82f03418f5183ab70e398a"}, ] [package.dependencies] -click = ">=7.0" -colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} -ghp-import = ">=1.0" -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} -jinja2 = ">=2.11.1" -markdown = ">=3.3.6" -markupsafe = ">=2.0.1" -mergedeep = ">=1.3.4" -mkdocs-get-deps = ">=0.2.0" -packaging = ">=20.5" -pathspec = ">=0.11.1" -pyyaml = ">=5.1" -pyyaml-env-tag = ">=0.1" -watchdog = ">=2.0" - -[package.extras] -i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.4) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", 
"pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] - -[[package]] -name = "mkdocs-autorefs" -version = "1.4.2" -description = "Automatically link across pages in MkDocs." +typing-extensions = {version = "*", markers = "python_version < \"3.11\""} + +[[package]] +name = "more-itertools" +version = "10.7.0" +description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "mkdocs_autorefs-1.4.2-py3-none-any.whl", hash = "sha256:83d6d777b66ec3c372a1aad4ae0cf77c243ba5bcda5bf0c6b8a2c5e7a3d89f13"}, - {file = "mkdocs_autorefs-1.4.2.tar.gz", hash = "sha256:e2ebe1abd2b67d597ed19378c0fff84d73d1dbce411fce7a7cc6f161888b6749"}, + {file = "more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e"}, + {file = "more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3"}, ] -[package.dependencies] -Markdown = ">=3.3" -markupsafe = ">=2.0.1" -mkdocs = ">=1.1" - [[package]] -name = "mkdocs-get-deps" -version = "0.2.0" -description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["dev"] files = [ - {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, - {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] -[package.dependencies] -importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} -mergedeep = ">=1.3.4" -platformdirs = ">=2.2.0" -pyyaml = ">=5.1" - [[package]] -name = "mkdocs-material" -version = "9.6.14" -description = "Documentation that simply works" +name = "nbclient" +version = "0.10.2" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
optional = false -python-versions = ">=3.8" -groups = ["main"] +python-versions = ">=3.9.0" +groups = ["docs"] +markers = "python_version == \"3.9\"" files = [ - {file = "mkdocs_material-9.6.14-py3-none-any.whl", hash = "sha256:3b9cee6d3688551bf7a8e8f41afda97a3c39a12f0325436d76c86706114b721b"}, - {file = "mkdocs_material-9.6.14.tar.gz", hash = "sha256:39d795e90dce6b531387c255bd07e866e027828b7346d3eba5ac3de265053754"}, + {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, + {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, ] [package.dependencies] -babel = ">=2.10,<3.0" -backrefs = ">=5.7.post1,<6.0" -colorama = ">=0.4,<1.0" -jinja2 = ">=3.1,<4.0" -markdown = ">=3.2,<4.0" -mkdocs = ">=1.6,<2.0" -mkdocs-material-extensions = ">=1.3,<2.0" -paginate = ">=0.5,<1.0" -pygments = ">=2.16,<3.0" -pymdown-extensions = ">=10.2,<11.0" -requests = ">=2.26,<3.0" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.4" [package.extras] -git = ["mkdocs-git-committers-plugin-2 (>=1.1,<3)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] -imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] -recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] +dev = ["pre-commit"] +docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] -name = "mkdocs-material-extensions" -version = "1.3.1" 
-description = "Extension pack for Python Markdown and MkDocs Material." +name = "nbclient" +version = "0.10.4" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, - {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, -] - -[[package]] -name = "mkdocstrings" -version = "0.29.1" -description = "Automatic documentation from sources, for MkDocs." -optional = false -python-versions = ">=3.9" -groups = ["main"] +python-versions = ">=3.10.0" +groups = ["docs"] +markers = "python_version >= \"3.10\"" files = [ - {file = "mkdocstrings-0.29.1-py3-none-any.whl", hash = "sha256:37a9736134934eea89cbd055a513d40a020d87dfcae9e3052c2a6b8cd4af09b6"}, - {file = "mkdocstrings-0.29.1.tar.gz", hash = "sha256:8722f8f8c5cd75da56671e0a0c1bbed1df9946c0cef74794d6141b34011abd42"}, + {file = "nbclient-0.10.4-py3-none-any.whl", hash = "sha256:9162df5a7373d70d606527300a95a975a47c137776cd942e52d9c7e29ff83440"}, + {file = "nbclient-0.10.4.tar.gz", hash = "sha256:1e54091b16e6da39e297b0ece3e10f6f29f4ac4e8ee515d29f8a7099bd6553c9"}, ] [package.dependencies] -importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} -Jinja2 = ">=2.11.1" -Markdown = ">=3.6" -MarkupSafe = ">=1.1" -mkdocs = ">=1.6" -mkdocs-autorefs = ">=1.4" -pymdown-extensions = ">=6.3" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1.3" +traitlets = ">=5.4" [package.extras] -crystal = ["mkdocstrings-crystal (>=0.3.4)"] -python = ["mkdocstrings-python (>=1.16.2)"] -python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] +dev = ["pre-commit"] +docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", 
"ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.1.0)", "pytest (>=9.0.1,<10)", "pytest-asyncio (>=1.3.0)", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.1.0)", "pytest (>=9.0.1,<10)", "pytest-asyncio (>=1.3.0)", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] -name = "mkdocstrings-python" -version = "1.16.12" -description = "A Python handler for mkdocstrings." +name = "nbconvert" +version = "7.17.0" +description = "Convert Jupyter Notebooks (.ipynb files) to other formats." optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["docs"] files = [ - {file = "mkdocstrings_python-1.16.12-py3-none-any.whl", hash = "sha256:22ded3a63b3d823d57457a70ff9860d5a4de9e8b1e482876fc9baabaf6f5f374"}, - {file = "mkdocstrings_python-1.16.12.tar.gz", hash = "sha256:9b9eaa066e0024342d433e332a41095c4e429937024945fea511afe58f63175d"}, + {file = "nbconvert-7.17.0-py3-none-any.whl", hash = "sha256:4f99a63b337b9a23504347afdab24a11faa7d86b405e5c8f9881cd313336d518"}, + {file = "nbconvert-7.17.0.tar.gz", hash = "sha256:1b2696f1b5be12309f6c7d707c24af604b87dfaf6d950794c7b07acab96dda78"}, ] [package.dependencies] -griffe = ">=1.6.2" -mkdocs-autorefs = ">=1.4" -mkdocstrings = ">=0.28.3" -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +beautifulsoup4 = "*" +bleach = {version = "!=5.0.0", extras = ["css"]} +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +traitlets = ">=5.1" -[[package]] -name = "more-itertools" -version = "10.7.0" -description = "More routines for operating on iterables, beyond itertools" 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e"}, - {file = "more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3"}, -] +[package.extras] +all = ["flaky", "intersphinx-registry", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (>=5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] +docs = ["intersphinx-registry", "ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (>=5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["pyqtwebengine (>=5.15)"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] +webpdf = ["playwright"] [[package]] -name = "mypy-extensions" -version = "1.1.0" -description = "Type system extensions for programs checked with the mypy type checker." 
+name = "nbformat" +version = "5.10.4" +description = "The Jupyter Notebook format" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["docs"] files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, + {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, + {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, ] +[package.dependencies] +fastjsonschema = ">=2.15" +jsonschema = ">=2.6" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + [[package]] name = "nest-asyncio" version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] +[[package]] +name = "nodeenv" +version = "1.10.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["docs"] +files = [ + {file = "nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827"}, + {file = "nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb"}, +] + [[package]] name = "numpy" version 
= "1.26.4" @@ -1650,34 +1923,31 @@ openapi-schema-validator = ">=0.3.2,<0.5" [package.extras] requests = ["requests"] +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +groups = ["docs"] +markers = "python_version < \"3.12\"" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + [[package]] name = "packaging" version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["dev", "docs"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] -[[package]] -name = "paginate" -version = "0.5.7" -description = "Divides large result sets into pages for easier browsing" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, - {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, -] - -[package.extras] -dev = ["pytest", "tox"] -lint = ["black"] - [[package]] name = "pandas" version = "2.3.0" @@ -1765,6 +2035,18 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] +[[package]] +name = "pandocfilters" +version = "1.5.1" +description = "Utilities for writing pandoc filters in python" +optional = false +python-versions = 
">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["docs"] +files = [ + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, +] + [[package]] name = "parse" version = "1.20.2" @@ -1783,7 +2065,7 @@ version = "0.8.4" description = "A Python Parser" optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -1811,39 +2093,19 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[[package]] -name = "pdoc" -version = "14.7.0" -description = "API Documentation for Python Projects" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pdoc-14.7.0-py3-none-any.whl", hash = "sha256:72377a907efc6b2c5b3c56b717ef34f11d93621dced3b663f3aede0b844c0ad2"}, - {file = "pdoc-14.7.0.tar.gz", hash = "sha256:2d28af9c0acc39180744ad0543e4bbc3223ecba0d1302db315ec521c51f71f93"}, -] - -[package.dependencies] -Jinja2 = ">=2.11.0" -MarkupSafe = "*" -pygments = ">=2.12.0" - -[package.extras] -dev = ["hypothesis", "mypy", "pdoc-pyo3-sample-library (==1.0.11)", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] - [[package]] name = "pexpect" version = 
"4.9.0" description = "Pexpect allows easy control of interactive console applications." optional = false python-versions = "*" -groups = ["dev"] +groups = ["dev", "docs"] markers = "python_version == \"3.9\" and sys_platform != \"win32\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, @@ -1859,7 +2121,7 @@ version = "4.3.8" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["dev", "docs"] files = [ {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, @@ -1886,13 +2148,30 @@ files = [ dev = ["pre-commit", "tox"] testing = ["coverage", "pytest", "pytest-benchmark"] +[[package]] +name = "prometheus-client" +version = "0.24.1" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055"}, + {file = "prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9"}, +] + +[package.extras] +aiohttp = ["aiohttp"] +django = ["django"] +twisted = ["twisted"] + [[package]] name = "prompt-toolkit" version = "3.0.51" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07"}, {file = "prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed"}, @@ -1907,7 +2186,7 @@ version = "7.0.0" description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, @@ -1931,12 +2210,12 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" -groups = ["dev"] -markers = "python_version == \"3.9\" and sys_platform != \"win32\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +groups = ["dev", "docs"] files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] +markers = {dev = "python_version == \"3.9\" and sys_platform != \"win32\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"", docs = "sys_platform != \"win32\" and python_version == \"3.9\" or sys_platform != \"win32\" and sys_platform != \"emscripten\" or os_name != \"nt\""} [[package]] name = "pure-eval" @@ -1944,7 +2223,7 @@ version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -1959,7 +2238,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main", "dev", "docs"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = 
"sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1972,7 +2251,7 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["dev", "docs"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -1999,25 +2278,6 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] -[[package]] -name = "pymdown-extensions" -version = "10.15" -description = "Extension pack for Python Markdown." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pymdown_extensions-10.15-py3-none-any.whl", hash = "sha256:46e99bb272612b0de3b7e7caf6da8dd5f4ca5212c0b273feb9304e236c484e5f"}, - {file = "pymdown_extensions-10.15.tar.gz", hash = "sha256:0e5994e32155f4b03504f939e501b981d306daf7ec2aa1cd2eb6bd300784f8f7"}, -] - -[package.dependencies] -markdown = ">=3.6" -pyyaml = "*" - -[package.extras] -extra = ["pygments (>=2.19.1)"] - [[package]] name = "pymysql" version = "1.1.1" @@ -2063,7 +2323,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev"] +groups = ["main", "dev", "docs"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2087,6 +2347,24 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-json-logger" +version = "4.0.0" +description = "JSON Log Formatter for the Python Logging Package" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2"}, + {file = "python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f"}, +] + +[package.dependencies] +typing_extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] + [[package]] name = "pytz" version = "2025.2" @@ -2105,7 +2383,7 @@ version = "310" description = "Python for Window Extensions" optional = false python-versions = "*" -groups = ["dev"] +groups = ["dev", "docs"] markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" files = [ {file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"}, @@ -2126,13 +2404,40 @@ files = [ {file = "pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475"}, ] +[[package]] +name = "pywinpty" +version = "3.0.3" +description = "Pseudo terminal support for Windows from Python." 
+optional = false +python-versions = ">=3.9" +groups = ["docs"] +markers = "os_name == \"nt\"" +files = [ + {file = "pywinpty-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:ff05f12d775b142b11c6fe085129bdd759b61cf7d41da6c745e78e3a1ef5bf40"}, + {file = "pywinpty-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:340ccacb4d74278a631923794ccd758471cfc8eeeeee4610b280420a17ad1e82"}, + {file = "pywinpty-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:dff25a9a6435f527d7c65608a7e62783fc12076e7d44487a4911ee91be5a8ac8"}, + {file = "pywinpty-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:fbc1e230e5b193eef4431cba3f39996a288f9958f9c9f092c8a961d930ee8f68"}, + {file = "pywinpty-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:c9081df0e49ffa86d15db4a6ba61530630e48707f987df42c9d3313537e81fc0"}, + {file = "pywinpty-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:15e79d870e18b678fb8a5a6105fd38496b55697c66e6fc0378236026bc4d59e9"}, + {file = "pywinpty-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9c91dbb026050c77bdcef964e63a4f10f01a639113c4d3658332614544c467ab"}, + {file = "pywinpty-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:fe1f7911805127c94cf51f89ab14096c6f91ffdcacf993d2da6082b2142a2523"}, + {file = "pywinpty-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:3f07a6cf1c1d470d284e614733c3d0f726d2c85e78508ea10a403140c3c0c18a"}, + {file = "pywinpty-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:15c7c0b6f8e9d87aabbaff76468dabf6e6121332c40fc1d83548d02a9d6a3759"}, + {file = "pywinpty-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:d4b6b7b0fe0cdcd02e956bd57cfe9f4e5a06514eecf3b5ae174da4f951b58be9"}, + {file = "pywinpty-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:34789d685fc0d547ce0c8a65e5a70e56f77d732fa6e03c8f74fefb8cbb252019"}, + {file = "pywinpty-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:0c37e224a47a971d1a6e08649a1714dac4f63c11920780977829ed5c8cadead1"}, + {file = "pywinpty-3.0.3-cp314-cp314t-win_arm64.whl", hash = 
"sha256:c4e9c3dff7d86ba81937438d5819f19f385a39d8f592d4e8af67148ceb4f6ab5"}, + {file = "pywinpty-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:0f10e81d52d7f2c4d927f645f247028e64eaf205a3ed9e64dbd998122108a218"}, + {file = "pywinpty-3.0.3.tar.gz", hash = "sha256:523441dc34d231fb361b4b00f8c99d3f16de02f5005fd544a0183112bcc22412"}, +] + [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2189,28 +2494,13 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] -[[package]] -name = "pyyaml-env-tag" -version = "1.1" -description = "A custom YAML tag for referencing environment variables in YAML files." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"}, - {file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"}, -] - -[package.dependencies] -pyyaml = "*" - [[package]] name = "pyzmq" version = "26.4.0" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "pyzmq-26.4.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:0329bdf83e170ac133f44a233fc651f6ed66ef8e66693b5af7d54f45d1ef5918"}, {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:398a825d2dea96227cf6460ce0a174cf7657d6f6827807d4d1ae9d0f9ae64315"}, @@ -2316,7 +2606,7 @@ version = "0.36.2" description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -2349,13 +2639,40 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["docs"] +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = 
"rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["docs"] +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + [[package]] name = "rpds-py" version = "0.25.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"}, {file = "rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"}, @@ -2476,6 +2793,22 @@ files = [ {file = "rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3"}, ] +[[package]] +name = "send2trash" +version = "2.1.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "send2trash-2.1.0-py3-none-any.whl", hash = "sha256:0da2f112e6d6bb22de6aa6daa7e144831a4febf2a87261451c4ad849fe9a873c"}, + {file = "send2trash-2.1.0.tar.gz", hash = "sha256:1c72b39f09457db3c05ce1d19158c2cbef4c32b8bedd02c155e49282b7ea7459"}, +] + +[package.extras] +nativelib = ["pyobjc (>=9.0) ; sys_platform == \"darwin\"", "pywin32 (>=305) ; sys_platform == \"win32\""] +test = ["pytest (>=8)"] + [[package]] name = "setuptools" version = "80.9.0" @@ -2503,12 +2836,24 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev"] 
+groups = ["main", "dev", "docs"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] +[[package]] +name = "soupsieve" +version = "2.8.3" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95"}, + {file = "soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349"}, +] + [[package]] name = "sqlalchemy" version = "2.0.41" @@ -2611,7 +2956,7 @@ version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -2653,6 +2998,47 @@ setuptools = ">=21.0.0" six = ">=1.10,<2.0" urllib3 = ">=1.26.5,<2.0.0" +[[package]] +name = "terminado" +version = "0.18.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, + {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, +] + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] + +[[package]] +name = "tinycss2" +version = "1.4.0" +description = "A tiny CSS parser" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, + {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["pytest", "ruff"] + [[package]] name = "tokenize-rt" version = "6.2.0" @@ -2714,7 +3100,7 @@ version = "6.5.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">=3.9" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7"}, {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6"}, @@ -2758,7 +3144,7 @@ version = "5.14.3" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -2774,7 +3160,7 @@ version = "4.14.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main", "dev", "docs"] files = [ {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, @@ -2787,12 +3173,27 @@ version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" -groups = ["main"] +groups = ["main", "docs"] files = [ {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +optional = false +python-versions = ">=3.7" +groups = ["docs"] +files = [ + {file = "uri-template-1.3.0.tar.gz", hash = 
"sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, +] + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + [[package]] name = "urllib3" version = "1.26.20" @@ -2810,61 +3211,73 @@ brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and p secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -[[package]] -name = "watchdog" -version = "6.0.0" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, - {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, - {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, - {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, - {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, - {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, - {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - [[package]] name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" -groups = ["dev"] +groups = ["dev", "docs"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = 
"sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +[[package]] +name = "webcolors" +version = "24.11.1" +description = "A library for working with the color formats defined by HTML and CSS." +optional = false +python-versions = ">=3.9" +groups = ["docs"] +markers = "python_version == \"3.9\"" +files = [ + {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, + {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, +] + +[[package]] +name = "webcolors" +version = "25.10.0" +description = "A library for working with the color formats defined by HTML and CSS." +optional = false +python-versions = ">=3.10" +groups = ["docs"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d"}, + {file = "webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf"}, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +groups = ["docs"] +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "websocket-client" +version = "1.9.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "websocket_client-1.9.0-py3-none-any.whl", hash = 
"sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"}, + {file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx_rtd_theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["pytest", "websockets"] + [[package]] name = "werkzeug" version = "3.1.3" @@ -2889,7 +3302,7 @@ version = "3.23.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["dev", "docs"] markers = "python_version == \"3.9\"" files = [ {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, @@ -2907,4 +3320,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "aa84f8ee47b6ed5fd621495488b73076c0b5a71ce88a00656950f20a821b16b1" +content-hash = "593fd5d3dc363dd953f15903d4d1ae08ca4488cde4bbe8c07ae31807ebf5c6b7" diff --git a/pyproject.toml b/pyproject.toml index 6ee7cf5..3438734 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,9 @@ pytest = "^7.4.2" black = {extras = ["jupyter"], version = "^23.11.0"} ipykernel = "^6.26.0" +[tool.poetry.group.docs.dependencies] +jupyter-book = "^2.0.0" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" \ No newline at end of file From de97f1494fb0c4adebced78a3a877a2385e5a66b Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 09:25:18 -0500 Subject: [PATCH 03/21] Migrate from Poetry to uv + hatchling Replace Poetry build system with hatchling and PEP 621 metadata. Update all GitHub Actions workflows to use astral-sh/setup-uv. Remove poetry.lock. Add pytest step to CI workflow. 
--- .github/workflows/build-test.yml | 18 +- .github/workflows/docs.yml | 15 +- .github/workflows/pypi.yml | 17 +- poetry.lock | 3323 ------------------------------ pyproject.toml | 69 +- 5 files changed, 53 insertions(+), 3389 deletions(-) delete mode 100644 poetry.lock diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 566fb1b..51ab33f 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -7,22 +7,18 @@ jobs: fail-fast: false matrix: python-version: ["3.13"] - poetry-version: ["2.1.2"] os: [ubuntu-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v4 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Run image - uses: abatilo/actions-poetry@v2 - with: - poetry-version: ${{ matrix.poetry-version }} - - name: Update lock file - run: poetry lock - - name: Install the project dependencies - run: poetry install + - name: Install dependencies + run: uv pip install --system ".[dev]" - name: Lint with black - run: poetry run black --check . - \ No newline at end of file + run: black --check . + - name: Run tests + run: pytest tests/ diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 470891b..961c86a 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -21,10 +21,12 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v4 + + - uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - name: Set up Node.js uses: actions/setup-node@v4 @@ -32,13 +34,10 @@ jobs: node-version: "22" - name: Install dependencies - run: | - pip install "jupyter-book>=2" - pip install . 
+ run: uv pip install --system ".[docs]" - name: Build the book - run: | - jupyter-book build --html + run: jupyter-book build --html - name: Upload artifact uses: actions/upload-pages-artifact@v3 diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index 51c428a..945b4d4 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -2,7 +2,7 @@ name: Publish to PyPI on: push: tags: - - 'v[0-9]+.[0-9]+.[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+' jobs: pypi-publish: name: Upload release to PyPI @@ -11,21 +11,18 @@ jobs: name: pypi url: https://pypi.org/p/dapi permissions: - id-token: write # IMPORTANT: this permission is mandatory for trusted publishing - contents: read # Required for checkout + id-token: write + contents: read steps: - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v4 - uses: actions/setup-python@v5 with: - python-version: 3.9 - - name: Poetry image - uses: abatilo/actions-poetry@v2 - - name: Install the project dependencies - run: poetry install + python-version: "3.13" - name: Build package - run: poetry build + run: uv build - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: - # Enable verbose logging for debugging verbose: true diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 153ce7f..0000000 --- a/poetry.lock +++ /dev/null @@ -1,3323 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
- -[[package]] -name = "anyio" -version = "4.12.1" -description = "High-level concurrency and networking framework on top of asyncio or Trio" -optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c"}, - {file = "anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} - -[package.extras] -trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -groups = ["dev", "docs"] -markers = "platform_system == \"Darwin\"" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "argon2-cffi" -version = "25.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741"}, - {file = "argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.6" -groups = ["docs"] -markers = "python_version >= \"3.14\"" -files = [ - {file = 
"argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = 
"argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "argon2-cffi-bindings" -version = "25.1.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.9" -groups = ["docs"] -markers = 
"python_version < \"3.14\"" -files = [ - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:3d3f05610594151994ca9ccb3c771115bdb4daef161976a266f0dd8aa9996b8f"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8b8efee945193e667a396cbc7b4fb7d357297d6234d30a489905d96caabde56b"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3c6702abc36bf3ccba3f802b799505def420a1b7039862014a65db3205967f5a"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1c70058c6ab1e352304ac7e3b52554daadacd8d453c1752e547c76e9c99ac44"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2fd3bfbff3c5d74fef31a722f729bf93500910db650c925c2d6ef879a7e51cb"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4f9665de60b1b0e99bcd6be4f17d90339698ce954cfd8d9cf4f91c995165a92"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ba92837e4a9aa6a508c8d2d7883ed5a8f6c308c89a4790e1e447a220deb79a85"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win32.whl", hash = "sha256:84a461d4d84ae1295871329b346a97f68eade8c53b6ed9a7ca2d7467f3c8ff6f"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b55aec3565b65f56455eebc9b9f34130440404f27fe21c3b375bf1ea4d8fbae6"}, - {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:87c33a52407e4c41f3b70a9c2d3f6056d88b10dad7695be708c5021673f55623"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44"}, - {file = 
"argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98"}, - {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94"}, - {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e"}, - {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d"}, - {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584"}, - {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690"}, - {file = 
"argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520"}, - {file = "argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d"}, -] - -[package.dependencies] -cffi = {version = ">=1.0.1", markers = "python_version < \"3.14\""} - -[[package]] -name = "arrow" -version = "1.4.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205"}, - {file = "arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -tzdata = {version = "*", markers = "python_version >= \"3.9\""} - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2025.2)", "simplejson (==3.*)"] - -[[package]] -name = "asttokens" -version = "3.0.0" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, - {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, -] - -[package.extras] -astroid = ["astroid (>=2,<4)"] -test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] - -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main"] -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - -[[package]] -name = "attrs" -version = "25.3.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.8" -groups = ["main", "docs"] -files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, -] - -[package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; 
platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] - -[[package]] -name = "beautifulsoup4" -version = "4.14.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.7.0" -groups = ["docs"] -files = [ - {file = "beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb"}, - {file = "beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86"}, -] - -[package.dependencies] -soupsieve = ">=1.6.1" -typing-extensions = ">=4.0.0" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "black" -version = "23.12.1" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, 
- {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, -] - -[package.dependencies] -click = ">=8.0.0" -ipython = {version = ">=7.8.0", optional = true, markers = "extra == \"jupyter\""} -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tokenize-rt = {version = ">=3.2.0", optional = true, markers = "extra == \"jupyter\""} -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] -jupyter = ["ipython 
(>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "bleach" -version = "6.2.0" -description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.9" -groups = ["docs"] -markers = "python_version == \"3.9\"" -files = [ - {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, - {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, -] - -[package.dependencies] -tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.5)"] - -[[package]] -name = "bleach" -version = "6.3.0" -description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.10" -groups = ["docs"] -markers = "python_version >= \"3.10\"" -files = [ - {file = "bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6"}, - {file = "bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22"}, -] - -[package.dependencies] -tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.5)"] - -[[package]] -name = "certifi" -version = "2025.4.26" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, - {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, -] - -[[package]] -name = "cffi" -version = "1.17.1" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = ">=3.8" -groups = ["main", "dev", "docs"] -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = 
"cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = 
"cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, -] -markers = {main = "platform_python_implementation != \"PyPy\"", dev = "implementation_name == \"pypy\""} - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.4.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, - {file = 
"charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, - {file = 
"charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, - {file = 
"charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, - {file = 
"charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash 
= "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, - {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, - {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, -] - -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -markers = "python_version == \"3.9\"" -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "click" -version = "8.2.1" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.10" -groups = ["dev"] -markers = "python_version >= \"3.10\"" -files = [ - {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, - {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "cloudpickle" -version = "3.1.1" -description = "Pickler class to extend the standard pickle.Pickler functionality" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "cloudpickle-3.1.1-py3-none-any.whl", hash = "sha256:c8c5a44295039331ee9dad40ba100a9c7297b6f988e50e87ccdf3765a668350e"}, - {file = 
"cloudpickle-3.1.1.tar.gz", hash = "sha256:b216fa8ae4019d5482a8ac3c95d8f6346115d8835911fd4aefd1a445e4242c64"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev", "docs"] -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\" or platform_system == \"Windows\"", docs = "sys_platform == \"win32\""} - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "cryptography" -version = "43.0.3" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "python_version == \"3.9\"" -files = [ - {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, - {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, - {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, - {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, - {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, - {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, 
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, - {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, - {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, - {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, - {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, - {file = 
"cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, - {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "cryptography" -version = "45.0.4" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main"] -markers = "python_version >= \"3.10\"" -files = [ - {file = "cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069"}, - {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d"}, - {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036"}, - {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e"}, - {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:25eb4d4d3e54595dc8adebc6bbd5623588991d86591a78c2548ffb64797341e2"}, - {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce1678a2ccbe696cf3af15a75bb72ee008d7ff183c9228592ede9db467e64f1b"}, - {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:49fe9155ab32721b9122975e168a6760d8ce4cffe423bcd7ca269ba41b5dfac1"}, - {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999"}, - {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750"}, - {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2"}, - {file = "cryptography-45.0.4-cp311-abi3-win32.whl", hash = "sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257"}, - {file = "cryptography-45.0.4-cp311-abi3-win_amd64.whl", hash = 
"sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8"}, - {file = "cryptography-45.0.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723"}, - {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637"}, - {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d"}, - {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee"}, - {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eaa3e28ea2235b33220b949c5a0d6cf79baa80eab2eb5607ca8ab7525331b9ff"}, - {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7ef2dde4fa9408475038fc9aadfc1fb2676b174e68356359632e980c661ec8f6"}, - {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:6a3511ae33f09094185d111160fd192c67aa0a2a8d19b54d36e4c78f651dc5ad"}, - {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6"}, - {file = "cryptography-45.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872"}, - {file = "cryptography-45.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4"}, - {file = "cryptography-45.0.4-cp37-abi3-win32.whl", hash = "sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97"}, - {file = "cryptography-45.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22"}, - {file = 
"cryptography-45.0.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a77c6fb8d76e9c9f99f2f3437c1a4ac287b34eaf40997cfab1e9bd2be175ac39"}, - {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7aad98a25ed8ac917fdd8a9c1e706e5a0956e06c498be1f713b61734333a4507"}, - {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3530382a43a0e524bc931f187fc69ef4c42828cf7d7f592f7f249f602b5a4ab0"}, - {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:6b613164cb8425e2f8db5849ffb84892e523bf6d26deb8f9bb76ae86181fa12b"}, - {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:96d4819e25bf3b685199b304a0029ce4a3caf98947ce8a066c9137cc78ad2c58"}, - {file = "cryptography-45.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b97737a3ffbea79eebb062eb0d67d72307195035332501722a9ca86bab9e3ab2"}, - {file = "cryptography-45.0.4-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4828190fb6c4bcb6ebc6331f01fe66ae838bb3bd58e753b59d4b22eb444b996c"}, - {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:03dbff8411206713185b8cebe31bc5c0eb544799a50c09035733716b386e61a4"}, - {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51dfbd4d26172d31150d84c19bbe06c68ea4b7f11bbc7b3a5e146b367c311349"}, - {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:0339a692de47084969500ee455e42c58e449461e0ec845a34a6a9b9bf7df7fb8"}, - {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:0cf13c77d710131d33e63626bd55ae7c0efb701ebdc2b3a7952b9b23a0412862"}, - {file = "cryptography-45.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bbc505d1dc469ac12a0a064214879eac6294038d6b24ae9f71faae1448a9608d"}, - {file = "cryptography-45.0.4.tar.gz", hash = 
"sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57"}, -] - -[package.dependencies] -cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""] -docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""] -pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] -sdist = ["build (>=1.0.0)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==45.0.4)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "debugpy" -version = "1.8.14" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339"}, - {file = "debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79"}, - {file = "debugpy-1.8.14-cp310-cp310-win32.whl", hash = "sha256:c442f20577b38cc7a9aafecffe1094f78f07fb8423c3dddb384e6b8f49fd2987"}, - {file = "debugpy-1.8.14-cp310-cp310-win_amd64.whl", hash = "sha256:f117dedda6d969c5c9483e23f573b38f4e39412845c7bc487b6f2648df30fe84"}, - {file = "debugpy-1.8.14-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:1b2ac8c13b2645e0b1eaf30e816404990fbdb168e193322be8f545e8c01644a9"}, - {file = 
"debugpy-1.8.14-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf431c343a99384ac7eab2f763980724834f933a271e90496944195318c619e2"}, - {file = "debugpy-1.8.14-cp311-cp311-win32.whl", hash = "sha256:c99295c76161ad8d507b413cd33422d7c542889fbb73035889420ac1fad354f2"}, - {file = "debugpy-1.8.14-cp311-cp311-win_amd64.whl", hash = "sha256:7816acea4a46d7e4e50ad8d09d963a680ecc814ae31cdef3622eb05ccacf7b01"}, - {file = "debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84"}, - {file = "debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826"}, - {file = "debugpy-1.8.14-cp312-cp312-win32.whl", hash = "sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f"}, - {file = "debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f"}, - {file = "debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f"}, - {file = "debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15"}, - {file = "debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e"}, - {file = "debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e"}, - {file = "debugpy-1.8.14-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:d5582bcbe42917bc6bbe5c12db1bffdf21f6bfc28d4554b738bf08d50dc0c8c3"}, - {file = "debugpy-1.8.14-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:5349b7c3735b766a281873fbe32ca9cca343d4cc11ba4a743f84cb854339ff35"}, - {file = "debugpy-1.8.14-cp38-cp38-win32.whl", hash = "sha256:7118d462fe9724c887d355eef395fae68bc764fd862cdca94e70dcb9ade8a23d"}, - {file = "debugpy-1.8.14-cp38-cp38-win_amd64.whl", hash = "sha256:d235e4fa78af2de4e5609073972700523e372cf5601742449970110d565ca28c"}, - {file = "debugpy-1.8.14-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:413512d35ff52c2fb0fd2d65e69f373ffd24f0ecb1fac514c04a668599c5ce7f"}, - {file = "debugpy-1.8.14-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c9156f7524a0d70b7a7e22b2e311d8ba76a15496fb00730e46dcdeedb9e1eea"}, - {file = "debugpy-1.8.14-cp39-cp39-win32.whl", hash = "sha256:b44985f97cc3dd9d52c42eb59ee9d7ee0c4e7ecd62bca704891f997de4cef23d"}, - {file = "debugpy-1.8.14-cp39-cp39-win_amd64.whl", hash = "sha256:b1528cfee6c1b1c698eb10b6b096c598738a8238822d218173d21c3086de8123"}, - {file = "debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20"}, - {file = "debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322"}, -] - -[[package]] -name = "decorator" -version = "5.2.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, - {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["docs"] -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = 
"sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.3.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev", "docs"] -markers = "python_version < \"3.11\"" -files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "executing" -version = "2.2.0" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, - {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] - -[[package]] -name = "fastjsonschema" -version = "2.21.2" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -groups = ["docs"] -files = [ - {file = "fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463"}, - {file = "fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de"}, -] - -[package.extras] -devel = 
["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -groups = ["docs"] -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] - -[[package]] -name = "greenlet" -version = "3.2.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" -files = [ - {file = "greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db"}, - {file = 
"greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b"}, - {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712"}, - {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00"}, - {file = "greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302"}, - {file = "greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5"}, - {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc"}, - {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba"}, - {file = "greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34"}, - {file = "greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb"}, - {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c"}, - {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163"}, - {file = "greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849"}, - {file = "greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b"}, - {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0"}, - {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36"}, - {file = "greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3"}, - {file = "greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141"}, - {file = "greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a"}, - {file = "greenlet-3.2.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:42efc522c0bd75ffa11a71e09cd8a399d83fafe36db250a87cf1dacfaa15dc64"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d760f9bdfe79bff803bad32b4d8ffb2c1d2ce906313fc10a83976ffb73d64ca7"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8324319cbd7b35b97990090808fdc99c27fe5338f87db50514959f8059999805"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:8c37ef5b3787567d322331d5250e44e42b58c8c713859b8a04c6065f27efbf72"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce539fb52fb774d0802175d37fcff5c723e2c7d249c65916257f0a940cee8904"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:003c930e0e074db83559edc8705f3a2d066d4aa8c2f198aff1e454946efd0f26"}, - {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7e70ea4384b81ef9e84192e8a77fb87573138aa5d4feee541d8014e452b434da"}, - {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22eb5ba839c4b2156f18f76768233fe44b23a31decd9cc0d4cc8141c211fd1b4"}, - {file = "greenlet-3.2.3-cp39-cp39-win32.whl", hash = "sha256:4532f0d25df67f896d137431b13f4cdce89f7e3d4a96387a41290910df4d3a57"}, - {file = "greenlet-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:aaa7aae1e7f75eaa3ae400ad98f8644bb81e1dc6ba47ce8a93d3f17274e08322"}, - {file = "greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "idna" 
-version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -groups = ["main", "docs"] -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - -[[package]] -name = "importlib-metadata" -version = "8.7.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version == \"3.9\"" -files = [ - {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, - {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - -[[package]] -name = "iniconfig" -version = "2.1.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, - {file = "iniconfig-2.1.0.tar.gz", hash = 
"sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, -] - -[[package]] -name = "ipykernel" -version = "6.29.5" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.18.1" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version == \"3.9\"" -files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" 
-pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] - -[[package]] -name = "ipython" -version = "8.37.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -groups = ["dev", "docs"] -markers = "python_version == \"3.10\"" -files = [ - {file = "ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2"}, - {file = "ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < 
\"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt_toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack_data = "*" -traitlets = ">=5.13.0" -typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "ipython" -version = "9.3.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.11" -groups = ["dev", "docs"] -markers = "python_version >= \"3.11\"" -files = [ - {file = "ipython-9.3.0-py3-none-any.whl", hash = "sha256:1a0b6dd9221a1f5dddf725b57ac0cb6fddc7b5f470576231ae9162b9b3455a04"}, - {file = "ipython-9.3.0.tar.gz", hash = "sha256:79eb896f9f23f50ad16c3bc205f686f6e030ad246cc309c6279a242b14afe9d8"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -ipython-pygments-lexers = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt_toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack_data = 
"*" -traitlets = ">=5.13.0" -typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["ipython[doc,matplotlib,test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinx_toml (==0.0.4)", "typing_extensions"] -matplotlib = ["matplotlib"] -test = ["packaging", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipykernel", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbclient", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "ipython-pygments-lexers" -version = "1.1.1" -description = "Defines a variety of Pygments lexers for highlighting IPython code." -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -markers = "python_version >= \"3.11\"" -files = [ - {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, - {file = "ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81"}, -] - -[package.dependencies] -pygments = "*" - -[[package]] -name = "isodate" -version = "0.7.2" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, - {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, -] - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -optional = false -python-versions = ">=3.7" -groups = ["docs"] -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = 
"sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] - -[package.dependencies] -arrow = ">=0.15.0" - -[[package]] -name = "jedi" -version = "0.19.2" -description = "An autocompletion tool for Python that can be used for text editors." -optional = false -python-versions = ">=3.6" -groups = ["dev", "docs"] -files = [ - {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, - {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, -] - -[package.dependencies] -parso = ">=0.8.4,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.6" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -groups = ["docs"] -files = [ - {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, - {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonpointer" -version = "3.0.0" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=3.7" -groups = ["docs"] -files = [ - {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, - {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, -] - -[[package]] -name = "jsonschema" -version = "4.24.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.9" -groups = ["main", "docs"] -files = [ - {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"}, - {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -rpds-py 
= ">=0.7.1" -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-spec" -version = "0.1.3" -description = "JSONSchema Spec with object-oriented paths" -optional = false -python-versions = ">=3.7.0,<4.0.0" -groups = ["main"] -files = [ - {file = "jsonschema_spec-0.1.3-py3-none-any.whl", hash = "sha256:b3cde007ad65c2e631e2f8653cf187124a2c714d02d9fafbab68ad64bf5745d6"}, - {file = "jsonschema_spec-0.1.3.tar.gz", hash = "sha256:8d8db7c255e524fab1016a952a9143e5b6e3c074f4ed25d1878f8e97806caec0"}, -] - -[package.dependencies] -jsonschema = ">=4.0.0,<5.0.0" -pathable = ">=0.4.1,<0.5.0" -PyYAML = ">=5.1" -typing-extensions = ">=4.3.0,<5.0.0" - -[[package]] -name = "jsonschema-specifications" -version = "2025.4.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.9" -groups = ["main", "docs"] -files = [ - {file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"}, - {file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "jupyter-book" -version = "2.1.2" -description = "Create computational narratives that are reusable, reproducible, and interactive." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "jupyter_book-2.1.2-py3-none-any.whl", hash = "sha256:1e92850680782ca777452780f9dee0550b52af338e6fe8c115961142ebf30a0e"}, - {file = "jupyter_book-2.1.2.tar.gz", hash = "sha256:94bb8e63ef191e88cb6e7ab8a1ac66c7dc354ac228de1f558e5beb0381d38f8a"}, -] - -[package.dependencies] -ipykernel = "*" -jupyter-core = "*" -jupyter-server = "*" -nodeenv = ">=1.9.1" -platformdirs = ">=4.2.2" - -[package.extras] -docs = ["markdown", "pandas"] - -[[package]] -name = "jupyter-client" -version = "8.6.3" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, - {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.8.1" -description = "Jupyter core package. A base package on which Jupyter projects rely." 
-optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0"}, - {file = "jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<9)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-events" -version = "0.12.0" -description = "Jupyter Event System library" -optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, - {file = "jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, -] - -[package.dependencies] -jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} -packaging = "*" -python-json-logger = ">=2.0.4" -pyyaml = ">=5.3" -referencing = "*" -rfc3339-validator = "*" -rfc3986-validator = ">=0.1.1" -traitlets = ">=5.3" - -[package.extras] -cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8)", "sphinxcontrib-spelling"] -test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] - -[[package]] -name = "jupyter-server" -version = "2.17.0" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f"}, - {file = "jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5"}, -] - -[package.dependencies] -anyio = ">=3.1.0" -argon2-cffi = ">=21.1" -jinja2 = ">=3.0.3" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -jupyter-events = ">=0.11.0" -jupyter-server-terminals = ">=0.4.4" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -overrides = {version = ">=5.0", markers = "python_version < \"3.12\""} -packaging = ">=22.0" -prometheus-client = ">=0.9" -pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = ">=1.8.2" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = ">=1.7" - -[package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.5.4" -description = "A Jupyter Server Extension Providing Terminals." 
-optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "jupyter_server_terminals-0.5.4-py3-none-any.whl", hash = "sha256:55be353fc74a80bc7f3b20e6be50a55a61cd525626f578dcb66a5708e2007d14"}, - {file = "jupyter_server_terminals-0.5.4.tar.gz", hash = "sha256:bbda128ed41d0be9020349f9f1f2a4ab9952a73ed5f5ac9f1419794761fb87f5"}, -] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS variables" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] - -[[package]] -name = "lazy-object-proxy" -version = "1.11.0" -description = "A fast and thorough lazy object proxy." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "lazy_object_proxy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:132bc8a34f2f2d662a851acfd1b93df769992ed1b81e2b1fda7db3e73b0d5a18"}, - {file = "lazy_object_proxy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:01261a3afd8621a1accb5682df2593dc7ec7d21d38f411011a5712dcd418fbed"}, - {file = "lazy_object_proxy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:090935756cc041e191f22f4f9c7fd4fe9a454717067adf5b1bbd2ce3046b556e"}, - {file = "lazy_object_proxy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:76ec715017f06410f57df442c1a8d66e6b5f7035077785b129817f5ae58810a4"}, - {file = "lazy_object_proxy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a9f39098e93a63618a79eef2889ae3cf0605f676cd4797fdfd49fcd7ddc318b"}, - {file = "lazy_object_proxy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:ee13f67f4fcd044ef27bfccb1c93d39c100046fec1fad6e9a1fcdfd17492aeb3"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4c84eafd8dd15ea16f7d580758bc5c2ce1f752faec877bb2b1f9f827c329cd"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:d2503427bda552d3aefcac92f81d9e7ca631e680a2268cbe62cd6a58de6409b7"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0613116156801ab3fccb9e2b05ed83b08ea08c2517fdc6c6bc0d4697a1a376e3"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bb03c507d96b65f617a6337dedd604399d35face2cdf01526b913fb50c4cb6e8"}, - {file = "lazy_object_proxy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28c174db37946f94b97a97b579932ff88f07b8d73a46b6b93322b9ac06794a3b"}, - {file = "lazy_object_proxy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:d662f0669e27704495ff1f647070eb8816931231c44e583f4d0701b7adf6272f"}, - {file = "lazy_object_proxy-1.11.0-py3-none-any.whl", hash = 
"sha256:a56a5093d433341ff7da0e89f9b486031ccd222ec8e52ec84d0ec1cdc819674b"}, - {file = "lazy_object_proxy-1.11.0.tar.gz", hash = "sha256:18874411864c9fbbbaa47f9fc1dd7aea754c86cfde21278ef427639d1dd78e9c"}, -] - -[[package]] -name = "markupsafe" -version = "3.0.2" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.9" -groups = ["main", "docs"] -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = 
"MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = 
"MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = 
"markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mistune" -version = "3.2.0" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "mistune-3.2.0-py3-none-any.whl", hash = "sha256:febdc629a3c78616b94393c6580551e0e34cc289987ec6c35ed3f4be42d0eee1"}, - {file = "mistune-3.2.0.tar.gz", hash = "sha256:708487c8a8cdd99c9d90eb3ed4c3ed961246ff78ac82f03418f5183ab70e398a"}, -] - -[package.dependencies] -typing-extensions = {version = "*", markers = "python_version < \"3.11\""} - -[[package]] -name = "more-itertools" -version = "10.7.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e"}, - {file = "more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, -] - -[[package]] -name = "nbclient" -version = "0.10.2" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -optional = false -python-versions = ">=3.9.0" -groups = ["docs"] -markers = "python_version == \"3.9\"" -files = [ - {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, - {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbclient" -version = "0.10.4" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-optional = false -python-versions = ">=3.10.0" -groups = ["docs"] -markers = "python_version >= \"3.10\"" -files = [ - {file = "nbclient-0.10.4-py3-none-any.whl", hash = "sha256:9162df5a7373d70d606527300a95a975a47c137776cd942e52d9c7e29ff83440"}, - {file = "nbclient-0.10.4.tar.gz", hash = "sha256:1e54091b16e6da39e297b0ece3e10f6f29f4ac4e8ee515d29f8a7099bd6553c9"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1.3" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.1.0)", "pytest (>=9.0.1,<10)", "pytest-asyncio (>=1.3.0)", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.1.0)", "pytest (>=9.0.1,<10)", "pytest-asyncio (>=1.3.0)", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.17.0" -description = "Convert Jupyter Notebooks (.ipynb files) to other formats." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "nbconvert-7.17.0-py3-none-any.whl", hash = "sha256:4f99a63b337b9a23504347afdab24a11faa7d86b405e5c8f9881cd313336d518"}, - {file = "nbconvert-7.17.0.tar.gz", hash = "sha256:1b2696f1b5be12309f6c7d707c24af604b87dfaf6d950794c7b07acab96dda78"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = {version = "!=5.0.0", extras = ["css"]} -defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -traitlets = ">=5.1" - -[package.extras] -all = ["flaky", "intersphinx-registry", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (>=5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] -docs = ["intersphinx-registry", "ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (>=5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["pyqtwebengine (>=5.15)"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - 
-[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -groups = ["dev", "docs"] -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "nodeenv" -version = "1.10.0" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["docs"] -files = [ - {file = "nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827"}, - {file = "nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version == \"3.9\"" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = 
"numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = 
"sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "numpy" -version = "2.2.6" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.10" -groups = ["main"] -markers = "python_version == \"3.10\"" -files = [ - {file = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"}, - {file = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"}, - {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163"}, - {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf"}, - {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83"}, - {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915"}, - {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680"}, - {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289"}, - {file = "numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d"}, - {file = "numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491"}, - {file = "numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a"}, - {file = "numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf"}, - {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1"}, - {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab"}, - {file = "numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47"}, - {file = "numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303"}, - {file = 
"numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff"}, - {file = "numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c"}, - {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3"}, - {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282"}, - {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87"}, - {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249"}, - {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49"}, - {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"}, - {file = "numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"}, - {file = "numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = 
"sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566"}, - {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f"}, - {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f"}, - {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868"}, - {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d"}, - {file = "numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd"}, - {file = "numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c"}, - {file = "numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6"}, - {file = "numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda"}, - {file = "numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40"}, - {file = "numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"}, - {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f"}, - {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa"}, - {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571"}, - {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1"}, - {file = "numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff"}, - {file = "numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"}, - {file = "numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"}, -] - -[[package]] -name = "numpy" -version = "2.3.0" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "python_version >= \"3.11\"" -files = [ - {file = "numpy-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3c9fdde0fa18afa1099d6257eb82890ea4f3102847e692193b54e00312a9ae9"}, - {file = "numpy-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46d16f72c2192da7b83984aa5455baee640e33a9f1e61e656f29adf55e406c2b"}, - {file = "numpy-2.3.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a0be278be9307c4ab06b788f2a077f05e180aea817b3e41cebbd5aaf7bd85ed3"}, - {file = "numpy-2.3.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = 
"sha256:99224862d1412d2562248d4710126355d3a8db7672170a39d6909ac47687a8a4"}, - {file = "numpy-2.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2393a914db64b0ead0ab80c962e42d09d5f385802006a6c87835acb1f58adb96"}, - {file = "numpy-2.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7729c8008d55e80784bd113787ce876ca117185c579c0d626f59b87d433ea779"}, - {file = "numpy-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:06d4fb37a8d383b769281714897420c5cc3545c79dc427df57fc9b852ee0bf58"}, - {file = "numpy-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c39ec392b5db5088259c68250e342612db82dc80ce044cf16496cf14cf6bc6f8"}, - {file = "numpy-2.3.0-cp311-cp311-win32.whl", hash = "sha256:ee9d3ee70d62827bc91f3ea5eee33153212c41f639918550ac0475e3588da59f"}, - {file = "numpy-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:43c55b6a860b0eb44d42341438b03513cf3879cb3617afb749ad49307e164edd"}, - {file = "numpy-2.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:2e6a1409eee0cb0316cb64640a49a49ca44deb1a537e6b1121dc7c458a1299a8"}, - {file = "numpy-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:389b85335838155a9076e9ad7f8fdba0827496ec2d2dc32ce69ce7898bde03ba"}, - {file = "numpy-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9498f60cd6bb8238d8eaf468a3d5bb031d34cd12556af53510f05fcf581c1b7e"}, - {file = "numpy-2.3.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:622a65d40d8eb427d8e722fd410ac3ad4958002f109230bc714fa551044ebae2"}, - {file = "numpy-2.3.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b9446d9d8505aadadb686d51d838f2b6688c9e85636a0c3abaeb55ed54756459"}, - {file = "numpy-2.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:50080245365d75137a2bf46151e975de63146ae6d79f7e6bd5c0e85c9931d06a"}, - {file = "numpy-2.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c24bb4113c66936eeaa0dc1e47c74770453d34f46ee07ae4efd853a2ed1ad10a"}, - {file = "numpy-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:4d8d294287fdf685281e671886c6dcdf0291a7c19db3e5cb4178d07ccf6ecc67"}, - {file = "numpy-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6295f81f093b7f5769d1728a6bd8bf7466de2adfa771ede944ce6711382b89dc"}, - {file = "numpy-2.3.0-cp312-cp312-win32.whl", hash = "sha256:e6648078bdd974ef5d15cecc31b0c410e2e24178a6e10bf511e0557eed0f2570"}, - {file = "numpy-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:0898c67a58cdaaf29994bc0e2c65230fd4de0ac40afaf1584ed0b02cd74c6fdd"}, - {file = "numpy-2.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:bd8df082b6c4695753ad6193018c05aac465d634834dca47a3ae06d4bb22d9ea"}, - {file = "numpy-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5754ab5595bfa2c2387d241296e0381c21f44a4b90a776c3c1d39eede13a746a"}, - {file = "numpy-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d11fa02f77752d8099573d64e5fe33de3229b6632036ec08f7080f46b6649959"}, - {file = "numpy-2.3.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:aba48d17e87688a765ab1cd557882052f238e2f36545dfa8e29e6a91aef77afe"}, - {file = "numpy-2.3.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4dc58865623023b63b10d52f18abaac3729346a7a46a778381e0e3af4b7f3beb"}, - {file = "numpy-2.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:df470d376f54e052c76517393fa443758fefcdd634645bc9c1f84eafc67087f0"}, - {file = "numpy-2.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:87717eb24d4a8a64683b7a4e91ace04e2f5c7c77872f823f02a94feee186168f"}, - {file = "numpy-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fa264d56882b59dcb5ea4d6ab6f31d0c58a57b41aec605848b6eb2ef4a43e8"}, - {file = "numpy-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e651756066a0eaf900916497e20e02fe1ae544187cb0fe88de981671ee7f6270"}, - {file = "numpy-2.3.0-cp313-cp313-win32.whl", hash = "sha256:e43c3cce3b6ae5f94696669ff2a6eafd9a6b9332008bafa4117af70f4b88be6f"}, - {file = "numpy-2.3.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:81ae0bf2564cf475f94be4a27ef7bcf8af0c3e28da46770fc904da9abd5279b5"}, - {file = "numpy-2.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:c8738baa52505fa6e82778580b23f945e3578412554d937093eac9205e845e6e"}, - {file = "numpy-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:39b27d8b38942a647f048b675f134dd5a567f95bfff481f9109ec308515c51d8"}, - {file = "numpy-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0eba4a1ea88f9a6f30f56fdafdeb8da3774349eacddab9581a21234b8535d3d3"}, - {file = "numpy-2.3.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0f1f11d0a1da54927436505a5a7670b154eac27f5672afc389661013dfe3d4f"}, - {file = "numpy-2.3.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:690d0a5b60a47e1f9dcec7b77750a4854c0d690e9058b7bef3106e3ae9117808"}, - {file = "numpy-2.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8b51ead2b258284458e570942137155978583e407babc22e3d0ed7af33ce06f8"}, - {file = "numpy-2.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:aaf81c7b82c73bd9b45e79cfb9476cb9c29e937494bfe9092c26aece812818ad"}, - {file = "numpy-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f420033a20b4f6a2a11f585f93c843ac40686a7c3fa514060a97d9de93e5e72b"}, - {file = "numpy-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d344ca32ab482bcf8735d8f95091ad081f97120546f3d250240868430ce52555"}, - {file = "numpy-2.3.0-cp313-cp313t-win32.whl", hash = "sha256:48a2e8eaf76364c32a1feaa60d6925eaf32ed7a040183b807e02674305beef61"}, - {file = "numpy-2.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ba17f93a94e503551f154de210e4d50c5e3ee20f7e7a1b5f6ce3f22d419b93bb"}, - {file = "numpy-2.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f14e016d9409680959691c109be98c436c6249eaf7f118b424679793607b5944"}, - {file = "numpy-2.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80b46117c7359de8167cc00a2c7d823bdd505e8c7727ae0871025a86d668283b"}, - {file = "numpy-2.3.0-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = 
"sha256:5814a0f43e70c061f47abd5857d120179609ddc32a613138cbb6c4e9e2dbdda5"}, - {file = "numpy-2.3.0-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ef6c1e88fd6b81ac6d215ed71dc8cd027e54d4bf1d2682d362449097156267a2"}, - {file = "numpy-2.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33a5a12a45bb82d9997e2c0b12adae97507ad7c347546190a18ff14c28bbca12"}, - {file = "numpy-2.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:54dfc8681c1906d239e95ab1508d0a533c4a9505e52ee2d71a5472b04437ef97"}, - {file = "numpy-2.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e017a8a251ff4d18d71f139e28bdc7c31edba7a507f72b1414ed902cbe48c74d"}, - {file = "numpy-2.3.0.tar.gz", hash = "sha256:581f87f9e9e9db2cba2141400e160e9dd644ee248788d6f90636eeb8fd9260a6"}, -] - -[[package]] -name = "openapi-core" -version = "0.16.0" -description = "client-side and server-side support for the OpenAPI Specification v3" -optional = false -python-versions = ">=3.7.0,<4.0.0" -groups = ["main"] -files = [ - {file = "openapi-core-0.16.0.tar.gz", hash = "sha256:5db8fa034e5c262de865cab5f2344995c52f1ba0386182c0be584d02f0282c6a"}, - {file = "openapi_core-0.16.0-py3-none-any.whl", hash = "sha256:4331f528f5a74c7a3963f37b2ad73c54e3dd477276354fd6b7188d2352fd7e8e"}, -] - -[package.dependencies] -isodate = "*" -jsonschema-spec = ">=0.1.1,<0.2.0" -more-itertools = "*" -openapi-schema-validator = ">=0.3.0,<0.4.0" -openapi-spec-validator = ">=0.5.0,<0.6.0" -parse = "*" -pathable = ">=0.4.0,<0.5.0" -typing-extensions = ">=4.3.0,<5.0.0" -werkzeug = "*" - -[package.extras] -django = ["django (>=3.0)"] -falcon = ["falcon (>=3.0)"] -flask = ["flask"] -requests = ["requests"] - -[[package]] -name = "openapi-schema-validator" -version = "0.3.4" -description = "OpenAPI schema validation for Python" -optional = false -python-versions = ">=3.7.0,<4.0.0" -groups = ["main"] -files = [ - {file = "openapi-schema-validator-0.3.4.tar.gz", hash = 
"sha256:7cf27585dd7970b7257cefe48e1a3a10d4e34421831bdb472d96967433bc27bd"}, - {file = "openapi_schema_validator-0.3.4-py3-none-any.whl", hash = "sha256:34fbd14b7501abe25e64d7b4624a9db02cde1a578d285b3da6f34b290cdf0b3a"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -jsonschema = ">=4.0.0,<5.0.0" - -[package.extras] -isodate = ["isodate"] -rfc3339-validator = ["rfc3339-validator"] -strict-rfc3339 = ["strict-rfc3339"] - -[[package]] -name = "openapi-spec-validator" -version = "0.5.4" -description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" -optional = false -python-versions = ">=3.7.0,<4.0.0" -groups = ["main"] -files = [ - {file = "openapi_spec_validator-0.5.4-py3-none-any.whl", hash = "sha256:96be4258fdccc89d3da094738e19d56b94956914b93a22de795b9dd220cb4c7c"}, - {file = "openapi_spec_validator-0.5.4.tar.gz", hash = "sha256:68654e81cc56c71392dba31bf55d11e1c03c99458bebcb0018959a7134e104da"}, -] - -[package.dependencies] -jsonschema = ">=4.0.0,<5.0.0" -jsonschema-spec = ">=0.1.1,<0.2.0" -lazy-object-proxy = ">=1.7.1,<2.0.0" -openapi-schema-validator = ">=0.3.2,<0.5" - -[package.extras] -requests = ["requests"] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." 
-optional = false -python-versions = ">=3.6" -groups = ["docs"] -markers = "python_version < \"3.12\"" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "25.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, -] - -[[package]] -name = "pandas" -version = "2.3.0" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pandas-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:625466edd01d43b75b1883a64d859168e4556261a5035b32f9d743b67ef44634"}, - {file = "pandas-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6872d695c896f00df46b71648eea332279ef4077a409e2fe94220208b6bb675"}, - {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4dd97c19bd06bc557ad787a15b6489d2614ddaab5d104a0310eb314c724b2d2"}, - {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:034abd6f3db8b9880aaee98f4f5d4dbec7c4829938463ec046517220b2f8574e"}, - {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23c2b2dc5213810208ca0b80b8666670eb4660bbfd9d45f58592cc4ddcfd62e1"}, - {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:39ff73ec07be5e90330cc6ff5705c651ace83374189dcdcb46e6ff54b4a72cd6"}, - {file = "pandas-2.3.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:40cecc4ea5abd2921682b57532baea5588cc5f80f0231c624056b146887274d2"}, - {file = "pandas-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8adff9f138fc614347ff33812046787f7d43b3cef7c0f0171b3340cae333f6ca"}, - {file = "pandas-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e5f08eb9a445d07720776df6e641975665c9ea12c9d8a331e0f6890f2dcd76ef"}, - {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa35c266c8cd1a67d75971a1912b185b492d257092bdd2709bbdebe574ed228d"}, - {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a0cc77b0f089d2d2ffe3007db58f170dae9b9f54e569b299db871a3ab5bf46"}, - {file = "pandas-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c06f6f144ad0a1bf84699aeea7eff6068ca5c63ceb404798198af7eb86082e33"}, - {file = "pandas-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed16339bc354a73e0a609df36d256672c7d296f3f767ac07257801aa064ff73c"}, - {file = "pandas-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:fa07e138b3f6c04addfeaf56cc7fdb96c3b68a3fe5e5401251f231fce40a0d7a"}, - {file = "pandas-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2eb4728a18dcd2908c7fccf74a982e241b467d178724545a48d0caf534b38ebf"}, - {file = "pandas-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9d8c3187be7479ea5c3d30c32a5d73d62a621166675063b2edd21bc47614027"}, - {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09"}, - {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d"}, - {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20"}, - {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b"}, - {file = "pandas-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:094e271a15b579650ebf4c5155c05dcd2a14fd4fdd72cf4854b2f7ad31ea30be"}, - {file = "pandas-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c7e2fc25f89a49a11599ec1e76821322439d90820108309bf42130d2f36c983"}, - {file = "pandas-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6da97aeb6a6d233fb6b17986234cc723b396b50a3c6804776351994f2a658fd"}, - {file = "pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f"}, - {file = "pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3"}, - {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8"}, - {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9"}, - {file = "pandas-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4930255e28ff5545e2ca404637bcc56f031893142773b3468dc021c6c32a1390"}, - {file = "pandas-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f925f1ef673b4bd0271b1809b72b3270384f2b7d9d14a189b12b7fc02574d575"}, - {file = "pandas-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78ad363ddb873a631e92a3c063ade1ecfb34cae71e9a2be6ad100f875ac1042"}, - {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c"}, - {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67"}, - {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f"}, - {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249"}, - {file = "pandas-2.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9efc0acbbffb5236fbdf0409c04edce96bec4bdaa649d49985427bd1ec73e085"}, - {file = "pandas-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75651c14fde635e680496148a8526b328e09fe0572d9ae9b638648c46a544ba3"}, - {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5be867a0541a9fb47a4be0c5790a4bccd5b77b92f0a59eeec9375fafc2aa14"}, - {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84141f722d45d0c2a89544dd29d35b3abfc13d2250ed7e68394eda7564bd6324"}, - {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f95a2aef32614ed86216d3c450ab12a4e82084e8102e355707a1d96e33d51c34"}, - {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e0f51973ba93a9f97185049326d75b942b9aeb472bec616a129806facb129ebb"}, - {file = "pandas-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b198687ca9c8529662213538a9bb1e60fa0bf0f6af89292eb68fea28743fcd5a"}, - {file = "pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib 
(>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = 
">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["docs"] -files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] - -[[package]] -name = "parse" -version = "1.20.2" -description = "parse() is the opposite of format()" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"}, - {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -groups = ["dev", "docs"] -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "pathable" -version = "0.4.4" -description = "Object-oriented paths" -optional = false -python-versions = "<4.0.0,>=3.7.0" -groups = ["main"] -files = [ - {file = "pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2"}, - {file = "pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." -optional = false -python-versions = "*" -groups = ["dev", "docs"] -markers = "python_version == \"3.9\" and sys_platform != \"win32\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "platformdirs" -version = "4.3.8" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -files = [ - {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, - {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "pluggy" -version = "1.6.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, - {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["coverage", "pytest", "pytest-benchmark"] - -[[package]] -name = "prometheus-client" -version = "0.24.1" -description = "Python client for the Prometheus monitoring system." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055"}, - {file = "prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9"}, -] - -[package.extras] -aiohttp = ["aiohttp"] -django = ["django"] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.51" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07"}, - {file = "prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "7.0.0" -description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
-optional = false -python-versions = ">=3.6" -groups = ["dev", "docs"] -files = [ - {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, - {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, - {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, - {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, - {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, - {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, - {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, -] - -[package.extras] -dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] -test = ["pytest", "pytest-xdist", "setuptools"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" 
-description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -groups = ["dev", "docs"] -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -markers = {dev = "python_version == \"3.9\" and sys_platform != \"win32\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"", docs = "sys_platform != \"win32\" and python_version == \"3.9\" or sys_platform != \"win32\" and sys_platform != \"emscripten\" or os_name != \"nt\""} - -[[package]] -name = "pure-eval" -version = "0.2.3" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -groups = ["dev", "docs"] -files = [ - {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev", "docs"] -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] -markers = {main = "platform_python_implementation != \"PyPy\"", dev = "implementation_name == \"pypy\""} - -[[package]] -name = "pygments" -version = "2.19.1" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyjwt" -version = "2.10.1" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, - {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, -] - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pymysql" -version = "1.1.1" -description = "Pure Python MySQL Driver" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"}, - {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"}, -] - -[package.extras] -ed25519 = ["PyNaCl (>=1.4.0)"] -rsa = ["cryptography"] - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = 
"sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev", "docs"] -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.1.0" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, - {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-json-logger" -version = "4.0.0" -description = "JSON Log Formatter for the Python Logging Package" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2"}, - {file = 
"python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f"}, -] - -[package.dependencies] -typing_extensions = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] - -[[package]] -name = "pytz" -version = "2025.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, -] - -[[package]] -name = "pywin32" -version = "310" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -groups = ["dev", "docs"] -markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" -files = [ - {file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"}, - {file = "pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d"}, - {file = "pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213"}, - {file = "pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd"}, - {file = "pywin32-310-cp311-cp311-win_amd64.whl", hash = 
"sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c"}, - {file = "pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582"}, - {file = "pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d"}, - {file = "pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060"}, - {file = "pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966"}, - {file = "pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab"}, - {file = "pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e"}, - {file = "pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33"}, - {file = "pywin32-310-cp38-cp38-win32.whl", hash = "sha256:0867beb8addefa2e3979d4084352e4ac6e991ca45373390775f7084cc0209b9c"}, - {file = "pywin32-310-cp38-cp38-win_amd64.whl", hash = "sha256:30f0a9b3138fb5e07eb4973b7077e1883f558e40c578c6925acc7a94c34eaa36"}, - {file = "pywin32-310-cp39-cp39-win32.whl", hash = "sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a"}, - {file = "pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475"}, -] - -[[package]] -name = "pywinpty" -version = "3.0.3" -description = "Pseudo terminal support for Windows from Python." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -markers = "os_name == \"nt\"" -files = [ - {file = "pywinpty-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:ff05f12d775b142b11c6fe085129bdd759b61cf7d41da6c745e78e3a1ef5bf40"}, - {file = "pywinpty-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:340ccacb4d74278a631923794ccd758471cfc8eeeeee4610b280420a17ad1e82"}, - {file = "pywinpty-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:dff25a9a6435f527d7c65608a7e62783fc12076e7d44487a4911ee91be5a8ac8"}, - {file = "pywinpty-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:fbc1e230e5b193eef4431cba3f39996a288f9958f9c9f092c8a961d930ee8f68"}, - {file = "pywinpty-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:c9081df0e49ffa86d15db4a6ba61530630e48707f987df42c9d3313537e81fc0"}, - {file = "pywinpty-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:15e79d870e18b678fb8a5a6105fd38496b55697c66e6fc0378236026bc4d59e9"}, - {file = "pywinpty-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9c91dbb026050c77bdcef964e63a4f10f01a639113c4d3658332614544c467ab"}, - {file = "pywinpty-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:fe1f7911805127c94cf51f89ab14096c6f91ffdcacf993d2da6082b2142a2523"}, - {file = "pywinpty-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:3f07a6cf1c1d470d284e614733c3d0f726d2c85e78508ea10a403140c3c0c18a"}, - {file = "pywinpty-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:15c7c0b6f8e9d87aabbaff76468dabf6e6121332c40fc1d83548d02a9d6a3759"}, - {file = "pywinpty-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:d4b6b7b0fe0cdcd02e956bd57cfe9f4e5a06514eecf3b5ae174da4f951b58be9"}, - {file = "pywinpty-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:34789d685fc0d547ce0c8a65e5a70e56f77d732fa6e03c8f74fefb8cbb252019"}, - {file = "pywinpty-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:0c37e224a47a971d1a6e08649a1714dac4f63c11920780977829ed5c8cadead1"}, - {file = "pywinpty-3.0.3-cp314-cp314t-win_arm64.whl", hash = 
"sha256:c4e9c3dff7d86ba81937438d5819f19f385a39d8f592d4e8af67148ceb4f6ab5"}, - {file = "pywinpty-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:0f10e81d52d7f2c4d927f645f247028e64eaf205a3ed9e64dbd998122108a218"}, - {file = "pywinpty-3.0.3.tar.gz", hash = "sha256:523441dc34d231fb361b4b00f8c99d3f16de02f5005fd544a0183112bcc22412"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -groups = ["main", "docs"] -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = 
"PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = 
"PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyzmq" -version = "26.4.0" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "pyzmq-26.4.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:0329bdf83e170ac133f44a233fc651f6ed66ef8e66693b5af7d54f45d1ef5918"}, - {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:398a825d2dea96227cf6460ce0a174cf7657d6f6827807d4d1ae9d0f9ae64315"}, - {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d52d62edc96787f5c1dfa6c6ccff9b581cfae5a70d94ec4c8da157656c73b5b"}, - {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1410c3a3705db68d11eb2424d75894d41cff2f64d948ffe245dd97a9debfebf4"}, - {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7dacb06a9c83b007cc01e8e5277f94c95c453c5851aac5e83efe93e72226353f"}, - {file = "pyzmq-26.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6bab961c8c9b3a4dc94d26e9b2cdf84de9918931d01d6ff38c721a83ab3c0ef5"}, - {file = "pyzmq-26.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a5c09413b924d96af2aa8b57e76b9b0058284d60e2fc3730ce0f979031d162a"}, - {file = "pyzmq-26.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d489ac234d38e57f458fdbd12a996bfe990ac028feaf6f3c1e81ff766513d3b"}, - {file = "pyzmq-26.4.0-cp310-cp310-win32.whl", hash = 
"sha256:dea1c8db78fb1b4b7dc9f8e213d0af3fc8ecd2c51a1d5a3ca1cde1bda034a980"}, - {file = "pyzmq-26.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:fa59e1f5a224b5e04dc6c101d7186058efa68288c2d714aa12d27603ae93318b"}, - {file = "pyzmq-26.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:a651fe2f447672f4a815e22e74630b6b1ec3a1ab670c95e5e5e28dcd4e69bbb5"}, - {file = "pyzmq-26.4.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:bfcf82644c9b45ddd7cd2a041f3ff8dce4a0904429b74d73a439e8cab1bd9e54"}, - {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9bcae3979b2654d5289d3490742378b2f3ce804b0b5fd42036074e2bf35b030"}, - {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccdff8ac4246b6fb60dcf3982dfaeeff5dd04f36051fe0632748fc0aa0679c01"}, - {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4550af385b442dc2d55ab7717837812799d3674cb12f9a3aa897611839c18e9e"}, - {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f7ffe9db1187a253fca95191854b3fda24696f086e8789d1d449308a34b88"}, - {file = "pyzmq-26.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3709c9ff7ba61589b7372923fd82b99a81932b592a5c7f1a24147c91da9a68d6"}, - {file = "pyzmq-26.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f8f3c30fb2d26ae5ce36b59768ba60fb72507ea9efc72f8f69fa088450cff1df"}, - {file = "pyzmq-26.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:382a4a48c8080e273427fc692037e3f7d2851959ffe40864f2db32646eeb3cef"}, - {file = "pyzmq-26.4.0-cp311-cp311-win32.whl", hash = "sha256:d56aad0517d4c09e3b4f15adebba8f6372c5102c27742a5bdbfc74a7dceb8fca"}, - {file = "pyzmq-26.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:963977ac8baed7058c1e126014f3fe58b3773f45c78cce7af5c26c09b6823896"}, - {file = "pyzmq-26.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0c8e8cadc81e44cc5088fcd53b9b3b4ce9344815f6c4a03aec653509296fae3"}, - {file = 
"pyzmq-26.4.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5227cb8da4b6f68acfd48d20c588197fd67745c278827d5238c707daf579227b"}, - {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1c07a7fa7f7ba86554a2b1bef198c9fed570c08ee062fd2fd6a4dcacd45f905"}, - {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae775fa83f52f52de73183f7ef5395186f7105d5ed65b1ae65ba27cb1260de2b"}, - {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c760d0226ebd52f1e6b644a9e839b5db1e107a23f2fcd46ec0569a4fdd4e63"}, - {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ef8c6ecc1d520debc147173eaa3765d53f06cd8dbe7bd377064cdbc53ab456f5"}, - {file = "pyzmq-26.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3150ef4084e163dec29ae667b10d96aad309b668fac6810c9e8c27cf543d6e0b"}, - {file = "pyzmq-26.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4448c9e55bf8329fa1dcedd32f661bf611214fa70c8e02fee4347bc589d39a84"}, - {file = "pyzmq-26.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e07dde3647afb084d985310d067a3efa6efad0621ee10826f2cb2f9a31b89d2f"}, - {file = "pyzmq-26.4.0-cp312-cp312-win32.whl", hash = "sha256:ba034a32ecf9af72adfa5ee383ad0fd4f4e38cdb62b13624278ef768fe5b5b44"}, - {file = "pyzmq-26.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:056a97aab4064f526ecb32f4343917a4022a5d9efb6b9df990ff72e1879e40be"}, - {file = "pyzmq-26.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:2f23c750e485ce1eb639dbd576d27d168595908aa2d60b149e2d9e34c9df40e0"}, - {file = "pyzmq-26.4.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:c43fac689880f5174d6fc864857d1247fe5cfa22b09ed058a344ca92bf5301e3"}, - {file = "pyzmq-26.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:902aca7eba477657c5fb81c808318460328758e8367ecdd1964b6330c73cae43"}, - {file = 
"pyzmq-26.4.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e48a830bfd152fe17fbdeaf99ac5271aa4122521bf0d275b6b24e52ef35eb6"}, - {file = "pyzmq-26.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31be2b6de98c824c06f5574331f805707c667dc8f60cb18580b7de078479891e"}, - {file = "pyzmq-26.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6332452034be001bbf3206ac59c0d2a7713de5f25bb38b06519fc6967b7cf771"}, - {file = "pyzmq-26.4.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:da8c0f5dd352136853e6a09b1b986ee5278dfddfebd30515e16eae425c872b30"}, - {file = "pyzmq-26.4.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f4ccc1a0a2c9806dda2a2dd118a3b7b681e448f3bb354056cad44a65169f6d86"}, - {file = "pyzmq-26.4.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c0b5fceadbab461578daf8d1dcc918ebe7ddd2952f748cf30c7cf2de5d51101"}, - {file = "pyzmq-26.4.0-cp313-cp313-win32.whl", hash = "sha256:28e2b0ff5ba4b3dd11062d905682bad33385cfa3cc03e81abd7f0822263e6637"}, - {file = "pyzmq-26.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:23ecc9d241004c10e8b4f49d12ac064cd7000e1643343944a10df98e57bc544b"}, - {file = "pyzmq-26.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:1edb0385c7f025045d6e0f759d4d3afe43c17a3d898914ec6582e6f464203c08"}, - {file = "pyzmq-26.4.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:93a29e882b2ba1db86ba5dd5e88e18e0ac6b627026c5cfbec9983422011b82d4"}, - {file = "pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45684f276f57110bb89e4300c00f1233ca631f08f5f42528a5c408a79efc4a"}, - {file = "pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72073e75260cb301aad4258ad6150fa7f57c719b3f498cb91e31df16784d89b"}, - {file = "pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be37e24b13026cfedd233bcbbccd8c0bcd2fdd186216094d095f60076201538d"}, - {file = 
"pyzmq-26.4.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:237b283044934d26f1eeff4075f751b05d2f3ed42a257fc44386d00df6a270cf"}, - {file = "pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b30f862f6768b17040929a68432c8a8be77780317f45a353cb17e423127d250c"}, - {file = "pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:c80fcd3504232f13617c6ab501124d373e4895424e65de8b72042333316f64a8"}, - {file = "pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:26a2a7451606b87f67cdeca2c2789d86f605da08b4bd616b1a9981605ca3a364"}, - {file = "pyzmq-26.4.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:831cc53bf6068d46d942af52fa8b0b9d128fb39bcf1f80d468dc9a3ae1da5bfb"}, - {file = "pyzmq-26.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:51d18be6193c25bd229524cfac21e39887c8d5e0217b1857998dfbef57c070a4"}, - {file = "pyzmq-26.4.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:445c97854204119ae2232503585ebb4fa7517142f71092cb129e5ee547957a1f"}, - {file = "pyzmq-26.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:807b8f4ad3e6084412c0f3df0613269f552110fa6fb91743e3e306223dbf11a6"}, - {file = "pyzmq-26.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c01d109dd675ac47fa15c0a79d256878d898f90bc10589f808b62d021d2e653c"}, - {file = "pyzmq-26.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0a294026e28679a8dd64c922e59411cb586dad307661b4d8a5c49e7bbca37621"}, - {file = "pyzmq-26.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:22c8dd677274af8dfb1efd05006d6f68fb2f054b17066e308ae20cb3f61028cf"}, - {file = "pyzmq-26.4.0-cp38-cp38-win32.whl", hash = "sha256:14fc678b696bc42c14e2d7f86ac4e97889d5e6b94d366ebcb637a768d2ad01af"}, - {file = "pyzmq-26.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1ef0a536662bbbdc8525f7e2ef19e74123ec9c4578e0582ecd41aedc414a169"}, - {file = "pyzmq-26.4.0-cp39-cp39-macosx_10_15_universal2.whl", hash = 
"sha256:a88643de8abd000ce99ca72056a1a2ae15881ee365ecb24dd1d9111e43d57842"}, - {file = "pyzmq-26.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a744ce209ecb557406fb928f3c8c55ce79b16c3eeb682da38ef5059a9af0848"}, - {file = "pyzmq-26.4.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9434540f333332224ecb02ee6278b6c6f11ea1266b48526e73c903119b2f420f"}, - {file = "pyzmq-26.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c6f0a23e55cd38d27d4c89add963294ea091ebcb104d7fdab0f093bc5abb1c"}, - {file = "pyzmq-26.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6145df55dc2309f6ef72d70576dcd5aabb0fd373311613fe85a5e547c722b780"}, - {file = "pyzmq-26.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2ea81823840ef8c56e5d2f9918e4d571236294fea4d1842b302aebffb9e40997"}, - {file = "pyzmq-26.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc2abc385dc37835445abe206524fbc0c9e3fce87631dfaa90918a1ba8f425eb"}, - {file = "pyzmq-26.4.0-cp39-cp39-win32.whl", hash = "sha256:41a2508fe7bed4c76b4cf55aacfb8733926f59d440d9ae2b81ee8220633b4d12"}, - {file = "pyzmq-26.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4000e8255d6cbce38982e5622ebb90823f3409b7ffe8aeae4337ef7d6d2612a"}, - {file = "pyzmq-26.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f6919d9c120488246bdc2a2f96662fa80d67b35bd6d66218f457e722b3ff64"}, - {file = "pyzmq-26.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:98d948288ce893a2edc5ec3c438fe8de2daa5bbbd6e2e865ec5f966e237084ba"}, - {file = "pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9f34f5c9e0203ece706a1003f1492a56c06c0632d86cb77bcfe77b56aacf27b"}, - {file = "pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80c9b48aef586ff8b698359ce22f9508937c799cc1d2c9c2f7c95996f2300c94"}, - {file = "pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f3f2a5b74009fd50b53b26f65daff23e9853e79aa86e0aa08a53a7628d92d44a"}, - {file = "pyzmq-26.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:61c5f93d7622d84cb3092d7f6398ffc77654c346545313a3737e266fc11a3beb"}, - {file = "pyzmq-26.4.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4478b14cb54a805088299c25a79f27eaf530564a7a4f72bf432a040042b554eb"}, - {file = "pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a28ac29c60e4ba84b5f58605ace8ad495414a724fe7aceb7cf06cd0598d04e1"}, - {file = "pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b03c1ceea27c6520124f4fb2ba9c647409b9abdf9a62388117148a90419494"}, - {file = "pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7731abd23a782851426d4e37deb2057bf9410848a4459b5ede4fe89342e687a9"}, - {file = "pyzmq-26.4.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a222ad02fbe80166b0526c038776e8042cd4e5f0dec1489a006a1df47e9040e0"}, - {file = "pyzmq-26.4.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:91c3ffaea475ec8bb1a32d77ebc441dcdd13cd3c4c284a6672b92a0f5ade1917"}, - {file = "pyzmq-26.4.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d9a78a52668bf5c9e7b0da36aa5760a9fc3680144e1445d68e98df78a25082ed"}, - {file = "pyzmq-26.4.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b70cab356ff8c860118b89dc86cd910c73ce2127eb986dada4fbac399ef644cf"}, - {file = "pyzmq-26.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acae207d4387780838192326b32d373bb286da0b299e733860e96f80728eb0af"}, - {file = "pyzmq-26.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f928eafd15794aa4be75463d537348b35503c1e014c5b663f206504ec1a90fe4"}, - {file = "pyzmq-26.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:552b0d2e39987733e1e9e948a0ced6ff75e0ea39ab1a1db2fc36eb60fd8760db"}, - {file 
= "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd670a8aa843f2ee637039bbd412e0d7294a5e588e1ecc9ad98b0cdc050259a4"}, - {file = "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d367b7b775a0e1e54a59a2ba3ed4d5e0a31566af97cc9154e34262777dab95ed"}, - {file = "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112af16c406e4a93df2caef49f884f4c2bb2b558b0b5577ef0b2465d15c1abc"}, - {file = "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76c298683f82669cab0b6da59071f55238c039738297c69f187a542c6d40099"}, - {file = "pyzmq-26.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:49b6ca2e625b46f499fb081aaf7819a177f41eeb555acb05758aa97f4f95d147"}, - {file = "pyzmq-26.4.0.tar.gz", hash = "sha256:4bd13f85f80962f91a651a7356fe0472791a5f7a92f227822b5acf44795c626d"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "referencing" -version = "0.36.2" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.9" -groups = ["main", "docs"] -files = [ - {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, - {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" -typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} - -[[package]] -name = "requests" -version = "2.32.4" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, - {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset_normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["docs"] -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["docs"] -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "rpds-py" -version = "0.25.1" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.9" -groups = ["main", "docs"] -files = [ - {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"}, - {file = 
"rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380"}, - {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9"}, - {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54"}, - {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2"}, - {file = "rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24"}, - {file = "rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a"}, - {file = "rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d"}, - {file = "rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65"}, - {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f"}, - {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d"}, - {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042"}, - {file = "rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc"}, - {file = "rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4"}, - {file = 
"rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4"}, - {file = "rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c"}, - {file = "rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65"}, - {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c"}, - {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd"}, - {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb"}, - {file = "rpds_py-0.25.1-cp312-cp312-win32.whl", hash = 
"sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe"}, - {file = "rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192"}, - {file = "rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728"}, - {file = "rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559"}, - {file = "rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295"}, - {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b"}, - {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98"}, - {file = 
"rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd"}, - {file = "rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31"}, - {file = "rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500"}, - {file = "rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = "sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5"}, - {file = "rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129"}, - {file = "rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6"}, - {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78"}, - {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72"}, - {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66"}, - {file = "rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = "sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523"}, - {file = "rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763"}, - {file = "rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd"}, - {file = "rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80"}, - {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", 
hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a"}, - {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451"}, - {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f"}, - {file = "rpds_py-0.25.1-cp39-cp39-win32.whl", hash = "sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449"}, - {file = "rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793"}, - {file = "rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3"}, -] - -[[package]] -name = "send2trash" -version = "2.1.0" -description = "Send file to trash natively under Mac OS X, Windows and Linux" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "send2trash-2.1.0-py3-none-any.whl", hash = "sha256:0da2f112e6d6bb22de6aa6daa7e144831a4febf2a87261451c4ad849fe9a873c"}, - {file = "send2trash-2.1.0.tar.gz", hash = "sha256:1c72b39f09457db3c05ce1d19158c2cbef4c32b8bedd02c155e49282b7ea7459"}, -] - -[package.extras] -nativelib = ["pyobjc (>=9.0) ; sys_platform == \"darwin\"", "pywin32 (>=305) ; sys_platform == \"win32\""] -test = ["pytest (>=8)"] - -[[package]] -name = "setuptools" -version = "80.9.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, - {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text 
(>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] - -[[package]] -name = "six" -version = "1.17.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev", "docs"] -files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, -] - -[[package]] -name = "soupsieve" -version = "2.8.3" -description = "A modern CSS selector implementation for Beautiful Soup." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95"}, - {file = "soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.41" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "SQLAlchemy-2.0.41-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-win32.whl", hash = "sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-win_amd64.whl", hash = "sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-win32.whl", hash = "sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-win_amd64.whl", hash = "sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = 
"sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a"}, - {file = 
"sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-win32.whl", hash = "sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-win_amd64.whl", hash = "sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-win32.whl", hash = "sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-win_amd64.whl", hash = "sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71"}, - {file = "sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576"}, - {file = "sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9"}, -] - -[package.dependencies] -greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] -aioodbc = ["aioodbc", "greenlet (>=1)"] -aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (>=1)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] -mssql = ["pyodbc"] 
-mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -groups = ["dev", "docs"] -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "tapipy" -version = "1.8.4" -description = "Python lib for interacting with an instance of the Tapis API Framework" -optional = false -python-versions = "<4.0,>=3.8" -groups = ["main"] -files = [ - {file = "tapipy-1.8.4-py3-none-any.whl", hash = "sha256:4065e1a340adfdce138e6d0fdcf91567e108e28429b4ec190cec0347822453bb"}, - {file = "tapipy-1.8.4.tar.gz", hash = "sha256:a6ded6643d405dd47073c6dc8d57a66d640c93ee90cd343a939f042fa1d913f9"}, -] - -[package.dependencies] -atomicwrites = ">=1.4.0,<2.0.0" -certifi = ">=2020.11.8" -cloudpickle = ">=1.6.0" -cryptography = ">=3.3.2" -jsonschema = ">=4.8.0,<5.0.0" -openapi_core = "0.16.0" -openapi_spec_validator = ">=0.5.0,<0.6.0" -PyJWT = ">=1.7.1" 
-python_dateutil = ">=2.5.3,<3.0.0" -pyyaml = ">=5.4" -requests = ">=2.20.0,<3.0.0" -setuptools = ">=21.0.0" -six = ">=1.10,<2.0" -urllib3 = ">=1.26.5,<2.0.0" - -[[package]] -name = "terminado" -version = "0.18.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, - {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, -] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] -typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] - -[[package]] -name = "tinycss2" -version = "1.4.0" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.8" -groups = ["docs"] -files = [ - {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, - {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["pytest", "ruff"] - -[[package]] -name = "tokenize-rt" -version = "6.2.0" -description = "A wrapper around the stdlib `tokenize` which roundtrips." 
-optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "tokenize_rt-6.2.0-py2.py3-none-any.whl", hash = "sha256:a152bf4f249c847a66497a4a95f63376ed68ac6abf092a2f7cfb29d044ecff44"}, - {file = "tokenize_rt-6.2.0.tar.gz", hash = "sha256:8439c042b330c553fdbe1758e4a05c0ed460dbbbb24a606f11f0dee75da4cad6"}, -] - -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version < \"3.11\"" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "tornado" -version = "6.5.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -files = [ - {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7"}, - {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6"}, - {file = "tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888"}, - {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331"}, - {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e"}, - {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401"}, - {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692"}, - {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a"}, - {file = "tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365"}, - {file = "tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b"}, - {file = "tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7"}, - {file = "tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c"}, -] - -[[package]] -name = "tqdm" -version = "4.67.1" -description = "Fast, Extensible Progress Meter" 
-optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, - {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] -discord = ["requests"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "typing-extensions" -version = "4.14.0" -description = "Backported and Experimental Type Hints for Python 3.9+" -optional = false -python-versions = ">=3.9" -groups = ["main", "dev", "docs"] -files = [ - {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, - {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, -] -markers = {dev = "python_version < \"3.12\""} - -[[package]] -name = "tzdata" -version = "2025.2" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -groups = ["main", "docs"] 
-files = [ - {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, - {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, -] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -groups = ["docs"] -files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, -] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "urllib3" -version = "1.26.20" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -groups = ["main"] -files = [ - {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, - {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, -] - -[package.extras] -brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -groups = ["dev", "docs"] -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "webcolors" -version = "24.11.1" -description = "A library for working with the color formats defined by HTML and CSS." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -markers = "python_version == \"3.9\"" -files = [ - {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, - {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, -] - -[[package]] -name = "webcolors" -version = "25.10.0" -description = "A library for working with the color formats defined by HTML and CSS." -optional = false -python-versions = ">=3.10" -groups = ["docs"] -markers = "python_version >= \"3.10\"" -files = [ - {file = "webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d"}, - {file = "webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf"}, -] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -groups = ["docs"] -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.9.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"}, - {file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx_rtd_theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["pytest", "websockets"] - 
-[[package]] -name = "werkzeug" -version = "3.1.3" -description = "The comprehensive WSGI web application library." -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, - {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, -] - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog (>=2.3)"] - -[[package]] -name = "zipp" -version = "3.23.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version == \"3.9\"" -files = [ - {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, - {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - -[metadata] -lock-version = "2.1" -python-versions = "^3.9" -content-hash = "593fd5d3dc363dd953f15903d4d1ae08ca4488cde4bbe8c07ae31807ebf5c6b7" diff --git a/pyproject.toml b/pyproject.toml index 3438734..f32cf1d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,47 +1,42 @@ -[tool.poetry] +[project] name = "dapi" version = "0.4.9" description = "DesignSafe API" +readme = "README.md" +license = "MIT" +requires-python = ">=3.9" authors = [ - "Krishna Kumar ", - "Pedro Arduino ", - "Scott 
Brandenberg ", + { name = "Krishna Kumar", email = "krishnak@utexas.edu" }, + { name = "Pedro Arduino", email = "parduino@uw.edu" }, + { name = "Scott Brandenberg", email = "sjbrandenberg@ucla.edu" }, ] -readme = "README.md" -packages = [{include = "dapi"}] - -[tool.poetry.dependencies] -python = "^3.9" # Project supports Python 3.9 and newer - -# Numpy versioning -numpy = [ - { version = ">=1.21.0,<2.0", python = "~3.9" }, # For Python 3.9.x - { version = ">=1.21.0", python = ">=3.10" } # For Python 3.10 and newer (latest compatible) +dependencies = [ + "numpy>=1.21.0,<2.0; python_version < '3.10'", + "numpy>=1.21.0; python_version >= '3.10'", + "pandas>=1.3.0,<3.0.0; python_version < '3.10'", + "pandas>=2.1.0; python_version >= '3.10'", + "sqlalchemy>=2.0.41", + "pymysql>=1.1.1", + "python-dotenv>=1.1.0", + "tqdm>=4.67.1", + "exceptiongroup>=1.3.0; python_version < '3.11'", + "tapipy>=1.8.4", + "jsonschema>=4.24.0", ] -# Pandas versioning -pandas = [ - { version = ">=1.3.0,<3.0.0", python = "~3.9" }, # For Python 3.9.x - { version = ">=2.1.0", python = ">=3.10" } # For Python 3.10 and newer (latest compatible) +[project.optional-dependencies] +dev = [ + "pytest>=7.4.2", + "black[jupyter]>=23.11.0", + "ipykernel>=6.26.0", +] +docs = [ + "jupyter-book>=2.0.0", ] -sqlalchemy = "^2.0.41" -pymysql = "^1.1.1" -python-dotenv = "^1.1.0" -tqdm = "^4.67.1" -# exceptiongroup is a backport for ExceptionGroup, which is built-in in Python 3.11+ -# This will install exceptiongroup only for Python versions less than 3.11 (i.e., 3.9 and 3.10) -exceptiongroup = { version = "^1.3.0", python = "<3.11" } -tapipy = "^1.8.4" -jsonschema = "^4.24.0" - -[tool.poetry.group.dev.dependencies] -pytest = "^7.4.2" -black = {extras = ["jupyter"], version = "^23.11.0"} -ipykernel = "^6.26.0" - -[tool.poetry.group.docs.dependencies] -jupyter-book = "^2.0.0" [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" \ No newline at end of file +requires = ["hatchling"] 
+build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["dapi"] From 1bacb768fd0efd302f732fd024030fb45c5c5c28 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 09:27:23 -0500 Subject: [PATCH 04/21] Update README for uv, TMS credentials, and Jupyter Book v2 --- README.md | 140 +++++++++++++++++++++++++----------------------------- 1 file changed, 66 insertions(+), 74 deletions(-) diff --git a/README.md b/README.md index b60dff6..0b49a89 100644 --- a/README.md +++ b/README.md @@ -2,29 +2,36 @@ [![build and test](https://github.com/DesignSafe-CI/dapi/actions/workflows/build-test.yml/badge.svg)](https://github.com/DesignSafe-CI/dapi/actions/workflows/build-test.yml) [![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE.md) -[![Docs](https://img.shields.io/badge/view-docs-8A2BE2?color=8A2BE2)](https://designsafe-ci.github.io/dapi/dapi/index.html) +[![PyPI version](https://badge.fury.io/py/dapi.svg)](https://badge.fury.io/py/dapi) +[![Docs](https://img.shields.io/badge/view-docs-8A2BE2?color=8A2BE2)](https://designsafe-ci.github.io/dapi/) -`dapi` is a library that simplifies the process of submitting, running, and monitoring [TAPIS v3](https://tapis.readthedocs.io/en/latest/) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org). +`dapi` is a Python library that simplifies the process of submitting, running, and monitoring [TAPIS v3](https://tapis.readthedocs.io/en/latest/) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org) or from the command line. dapi - ## Features ### Jobs +- Generate TAPIS v3 job requests with automatic app parameter mapping +- Submit, monitor (with progress bars), and manage jobs +- Access and download job outputs +- Discover and explore available DesignSafe applications -* Get TAPIS v3 templates for jobs: No need to fiddle with complex API requests. 
`dapi` abstracts away the complexities. +### TMS Credentials +- Establish, check, and revoke SSH keys on TACC execution systems (Frontera, Stampede3, LS6) +- Works from any environment -- DesignSafe JupyterHub, command line, CI/CD -* Seamless Integration with DesignSafe Jupyter Notebooks: Launch DesignSafe applications directly from the Jupyter environment. +### Files +- Translate DesignSafe paths (`/MyData`, `/CommunityData`, `/projects`) to TAPIS URIs +- Upload, download, and list files on DesignSafe storage ### Database - Connects to SQL databases on DesignSafe: | Database | dbname | env_prefix | |----------|--------|------------| | NGL | `ngl`| `NGL_` | -| Earthake Recovery | `eq` | `EQ_` | +| Earthquake Recovery | `eq` | `EQ_` | | Vp | `vp` | `VP_` | Define the following environment variables: @@ -37,115 +44,100 @@ Define the following environment variables: For e.g., to add the environment variable `NGL_DB_USER` edit `~/.bashrc`, `~/.zshrc`, or a similar shell-specific configuration file for the current user and add `export NGL_DB_USER="dspublic"`. - ## Installation -Install `dapi` via pip - ```shell -pip3 install dapi +pip install dapi ``` -To install the current development version of the library use: +To install the current development version: ```shell pip install git+https://github.com/DesignSafe-CI/dapi.git --quiet ``` -## Example usage: +## Quick Start -### Storing credentials +### Authentication -Dapi uses the Tapis v3 SDK to authenticate with the DesignSafe API. To store your credentials, create a `.env` file in the root of your project with the following content: +Create a `.env` file with your DesignSafe credentials: ```shell -DESIGNSAFE_USERNAME= -DESIGNSAFE_PASSWORD= +DESIGNSAFE_USERNAME=your_username +DESIGNSAFE_PASSWORD=your_password ``` -### Jobs - -* [Jupyter Notebook Templates](example-notebooks/template-mpm-run.ipynb) using dapi. 
- -* View [dapi API doc](https://designsafe-ci.github.io/dapi/dapi/index.html) - -On [DesignSafe Jupyter](https://jupyter.designsafe-ci.org/): - -Install the latest version of `dapi` and restart the kernel (Kernel >> Restart Kernel): +### Setup and submit a job ```python -# Remove any previous installations -!pip uninstall dapi -y -# Install -!pip install dapi --quiet -``` - -* Import `dapi` library -```python -import dapi -``` - -* To list all functions in `dapi` -```python -dir(dapi) +from dapi import DSClient + +# Authenticate +client = DSClient() + +# Establish TMS credentials (one-time per system) +client.systems.establish_credentials("frontera") + +# Submit a job +job_request = client.jobs.generate_request( + app_id="matlab-r2023a", + input_dir_uri="/MyData/analysis/input/", + script_filename="run_analysis.m", + max_minutes=30, + allocation="your_allocation" +) +job = client.jobs.submit_request(job_request) +final_status = job.monitor() ``` ### Database + ```python -from dapi.db import DSDatabase +from dapi import DSClient -db = DSDatabase("ngl") -sql = 'SELECT * FROM SITE' -df = db.read_sql(sql) +client = DSClient() +df = client.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10") print(df) - -# Optionally, close the database connection when done -db.close() ``` - ## Support -For any questions, issues, or feedback submit an [issue](https://github.com/DesignSafe-CI/dapi/issues/new) +For any questions, issues, or feedback submit an [issue](https://github.com/DesignSafe-CI/dapi/issues/new). ## Development -To develop or test the library locally. Install [Poetry](https://python-poetry.org/docs/#installation). 
In the current repository run the following commands +Install [uv](https://docs.astral.sh/uv/getting-started/installation/), then: ```shell -poetry shell -poetry install -poetry build +uv venv +uv pip install -e ".[dev]" ``` -To run the unit test +Run tests: ```shell -poetry run pytest -v +pytest tests/ -v ``` +Build the package: +```shell +uv build +``` -## License - -`dapi` is licensed under the [MIT License](LICENSE.md). - -## Authors - -* Krishna Kumar, University of Texas at Austin -* Prof. Pedro Arduino, University of Washington -* Prof. Scott Brandenberg, University of California Los Angeles - +### Documentation -## Documentation +Documentation uses [Jupyter Book v2](https://mystmd.org). To build and serve locally: -View [dapi API doc](https://designsafe-ci.github.io/dapi/dapi/index.html) +```shell +uv pip install -e ".[docs]" +jupyter-book start +``` -### Running documentation locally +## License -To serve the MkDocs documentation locally: +`dapi` is licensed under the [MIT License](LICENSE.md). -```shell -poetry install -poetry run mkdocs serve -``` +## Authors -This will start a local server at `http://127.0.0.1:8000/dapi/` where you can view the documentation. \ No newline at end of file +- Krishna Kumar, University of Texas at Austin +- Prof. Pedro Arduino, University of Washington +- Prof. 
Scott Brandenberg, University of California Los Angeles From 14e9b9ced3a5cad1845513922907c305b87780f9 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 09:30:52 -0500 Subject: [PATCH 05/21] Add _build to gitignore, hide MyST footer in docs --- .gitignore | 1 + myst.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index b04ca41..b1078b1 100644 --- a/.gitignore +++ b/.gitignore @@ -141,6 +141,7 @@ venv.bak/ .ropeproject # Jupyter Book build output +/_build/ /out/ # mkdocs documentation (legacy) diff --git a/myst.yml b/myst.yml index c3bfecb..2a8474c 100644 --- a/myst.yml +++ b/myst.yml @@ -55,3 +55,4 @@ site: options: logo: docs/nheri.png favicon: docs/favicon.ico + hide_footer_text: true From a8a65cae8d37f313e73beb4d8cb762fe11c4eef9 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 09:50:57 -0500 Subject: [PATCH 06/21] Add ds.jobs.list() and standardize docs to use ds variable Add list_jobs() to fetch Tapis jobs as a pandas DataFrame with optional filtering by app_id and status. Includes 14 tests. Rename client variable to ds across all documentation examples. 
--- dapi/client.py | 59 +++++++++++++--- dapi/jobs.py | 85 +++++++++++++++++++++++ docs/api/index.md | 16 ++--- docs/api/jobs.md | 6 ++ docs/authentication.md | 62 ++++++++--------- docs/database.md | 74 ++++++++++---------- docs/examples/database.md | 58 ++++++++-------- docs/index.md | 8 +-- docs/jobs.md | 91 ++++++++++++++++--------- docs/quickstart.md | 66 +++++++++--------- tests/jobs/test_list_jobs.py | 126 +++++++++++++++++++++++++++++++++++ 11 files changed, 468 insertions(+), 183 deletions(-) create mode 100644 tests/jobs/test_list_jobs.py diff --git a/dapi/client.py b/dapi/client.py index 3abe6b9..e227646 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -40,21 +40,21 @@ class DSClient: Example: Basic usage with automatic authentication: - >>> client = DSClient() + >>> ds = DSClient() Enter DesignSafe Username: myuser Enter DesignSafe Password: [hidden] Authentication successful. Using explicit credentials: - >>> client = DSClient(username="myuser", password="mypass") + >>> ds = DSClient(username="myuser", password="mypass") Authentication successful. Using a pre-authenticated Tapis client: >>> tapis = Tapis(base_url="https://designsafe.tapis.io", ...) >>> tapis.get_tokens() - >>> client = DSClient(tapis_client=tapis) + >>> ds = DSClient(tapis_client=tapis) """ def __init__(self, tapis_client: Optional[Tapis] = None, **auth_kwargs): @@ -196,7 +196,7 @@ def translate_uri_to_path(self, *args, **kwargs) -> str: str: The corresponding DesignSafe local path (e.g., /home/jupyter/MyData/path). Example: - >>> local_path = client.files.translate_uri_to_path("tapis://designsafe.storage.default/user/data") + >>> local_path = ds.files.translate_uri_to_path("tapis://designsafe.storage.default/user/data") >>> print(local_path) # "/home/jupyter/MyData/data" """ return files_module.tapis_uri_to_local_path(*args, **kwargs) @@ -453,7 +453,7 @@ def generate_request( JobSubmissionError: If job request generation fails. 
Example: - >>> job_request = client.jobs.generate_request( + >>> job_request = ds.jobs.generate_request( ... app_id="matlab-r2023a", ... input_dir_uri="tapis://designsafe.storage.default/username/input/", ... script_filename="run_analysis.m", @@ -506,8 +506,8 @@ def submit_request(self, job_request: Dict[str, Any]) -> SubmittedJob: JobSubmissionError: If the Tapis submission fails or encounters an error. Example: - >>> job_request = client.jobs.generate_request(...) - >>> submitted_job = client.jobs.submit_request(job_request) + >>> job_request = ds.jobs.generate_request(...) + >>> submitted_job = ds.jobs.submit_request(job_request) >>> print(f"Job submitted with UUID: {submitted_job.uuid}") """ return jobs_module.submit_job_request(self._tapis, job_request) @@ -523,7 +523,7 @@ def get(self, job_uuid: str) -> SubmittedJob: SubmittedJob: A SubmittedJob object for monitoring and managing the job. Example: - >>> job = client.jobs.get("12345678-1234-1234-1234-123456789abc") + >>> job = ds.jobs.get("12345678-1234-1234-1234-123456789abc") >>> status = job.status """ return SubmittedJob(self._tapis, job_uuid) @@ -541,7 +541,7 @@ def get_status(self, job_uuid: str) -> str: JobMonitorError: If status retrieval fails. Example: - >>> status = client.jobs.get_status("12345678-1234-1234-1234-123456789abc") + >>> status = ds.jobs.get_status("12345678-1234-1234-1234-123456789abc") >>> print(f"Job status: {status}") """ return jobs_module.get_job_status(self._tapis, job_uuid) @@ -555,7 +555,7 @@ def get_runtime_summary(self, job_uuid: str, verbose: bool = False): Defaults to False. Example: - >>> client.jobs.get_runtime_summary("12345678-1234-1234-1234-123456789abc") + >>> ds.jobs.get_runtime_summary("12345678-1234-1234-1234-123456789abc") Runtime Summary --------------- QUEUED time: 00:05:30 @@ -572,7 +572,44 @@ def interpret_status(self, final_status: str, job_uuid: Optional[str] = None): job_uuid (str, optional): The job UUID for context in the message. 
Example: - >>> client.jobs.interpret_status("FINISHED", "12345678-1234-1234-1234-123456789abc") + >>> ds.jobs.interpret_status("FINISHED", "12345678-1234-1234-1234-123456789abc") Job 12345678-1234-1234-1234-123456789abc completed successfully. """ jobs_module.interpret_job_status(final_status, job_uuid) + + def list( + self, + app_id: Optional[str] = None, + status: Optional[str] = None, + limit: int = 100, + verbose: bool = False, + ): + """List jobs as a pandas DataFrame with optional filtering. + + Fetches jobs from Tapis ordered by creation date (newest first) + and returns them as a DataFrame. Filters are applied client-side. + + Args: + app_id (str, optional): Filter by application ID. + status (str, optional): Filter by job status (e.g., "FINISHED"). + Case-insensitive. + limit (int, optional): Maximum jobs to fetch. Defaults to 100. + verbose (bool, optional): Print job count. Defaults to False. + + Returns: + pd.DataFrame: Job metadata with formatted datetime columns. + + Raises: + JobMonitorError: If the Tapis API call fails. 
+ + Example: + >>> df = ds.jobs.list(app_id="matlab-r2023a", status="FINISHED") + >>> print(df[["name", "uuid", "status", "created_dt"]]) + """ + return jobs_module.list_jobs( + self._tapis, + app_id=app_id, + status=status, + limit=limit, + verbose=verbose, + ) diff --git a/dapi/jobs.py b/dapi/jobs.py index 8e5d037..c312b20 100644 --- a/dapi/jobs.py +++ b/dapi/jobs.py @@ -10,6 +10,7 @@ from tapipy.errors import BaseTapyException from dataclasses import dataclass, field, asdict from tqdm.auto import tqdm +import pandas as pd from .apps import get_app_details from .exceptions import ( JobSubmissionError, @@ -1340,3 +1341,87 @@ def interpret_job_status(final_status: str, job_uuid: Optional[str] = None): print(f"{job_id_str} ended with status: {final_status}") else: print(f"{job_id_str} ended with an unexpected status: {final_status}") + + +def list_jobs( + tapis_client: Tapis, + app_id: Optional[str] = None, + status: Optional[str] = None, + limit: int = 100, + verbose: bool = False, +) -> pd.DataFrame: + """Fetch Tapis jobs and return them as a pandas DataFrame. + + Retrieves jobs from Tapis ordered by creation date (newest first) + and optionally filters by app ID and/or status. Filters are applied + client-side after fetching. + + Args: + tapis_client: Authenticated Tapis client instance. + app_id: Filter by application ID (e.g., "opensees-mp-s3"). + status: Filter by job status (e.g., "FINISHED", "FAILED"). + Case-insensitive. + limit: Maximum number of jobs to fetch from Tapis. Defaults to 100. + verbose: If True, prints the number of jobs found. + + Returns: + DataFrame with job metadata and formatted datetime columns. + Priority columns appear first: name, uuid, status, appId, appVersion, + created_dt, ended_dt. Additional datetime columns include _dt + (timezone-aware) and _date (date only) variants for created, ended, + remoteStarted, and lastUpdated. + + Raises: + JobMonitorError: If the Tapis API call fails. 
+ + Example: + >>> df = list_jobs(t, app_id="matlab-r2023a", status="FINISHED") + >>> print(df[["name", "uuid", "status", "created_dt"]]) + """ + try: + jobs_list = tapis_client.jobs.getJobList( + limit=limit, + orderBy="created(desc)", + ) + except BaseTapyException as e: + raise JobMonitorError(f"Failed to list jobs: {e}") from e + except Exception as e: + raise JobMonitorError(f"Unexpected error listing jobs: {e}") from e + + if not jobs_list: + if verbose: + print("Found 0 jobs.") + return pd.DataFrame() + + # Convert TapisResult objects to dicts + jobs_dicts = [job.__dict__ for job in jobs_list] + df = pd.DataFrame(jobs_dicts) + + # Apply client-side filters + if app_id and "appId" in df.columns: + df = df[df["appId"] == app_id] + if status and "status" in df.columns: + df = df[df["status"] == status.upper()] + + # Add formatted datetime columns + time_cols = ["created", "ended", "remoteStarted", "lastUpdated"] + for col in time_cols: + if col in df.columns: + df[f"{col}_dt"] = pd.to_datetime(df[col], utc=True, errors="coerce") + df[f"{col}_date"] = df[f"{col}_dt"].dt.date + + # Reorder: priority columns first + priority = [ + "name", "uuid", "status", "appId", "appVersion", + "created_dt", "ended_dt", + ] + priority_present = [c for c in priority if c in df.columns] + remaining = [c for c in df.columns if c not in priority_present] + df = df[priority_present + remaining] + + df = df.reset_index(drop=True) + + if verbose: + print(f"Found {len(df)} jobs.") + + return df diff --git a/docs/api/index.md b/docs/api/index.md index f7e194e..fab0179 100644 --- a/docs/api/index.md +++ b/docs/api/index.md @@ -29,20 +29,20 @@ The DAPI package is organized into several core modules: from dapi import DSClient # Initialize client -client = DSClient() +ds = DSClient() # Access different services -client.jobs.generate_request(...) -client.files.upload(...) -client.db.ngl.read_sql(...) +ds.jobs.generate_request(...) +ds.files.upload(...) +ds.db.ngl.read_sql(...) 
``` ### **Common Operations** -- **Submit Jobs**: `client.jobs.submit_request(job_dict)` +- **Submit Jobs**: `ds.jobs.submit_request(job_dict)` - **Monitor Jobs**: `submitted_job.monitor()` -- **File Upload**: `client.files.upload(local_path, remote_uri)` -- **File Download**: `client.files.download(remote_uri, local_path)` -- **Database Query**: `client.db.ngl.read_sql("SELECT * FROM table")` +- **File Upload**: `ds.files.upload(local_path, remote_uri)` +- **File Download**: `ds.files.download(remote_uri, local_path)` +- **Database Query**: `ds.db.ngl.read_sql("SELECT * FROM table")` ### **Advanced Features** - **Archive Management**: Custom job result organization diff --git a/docs/api/jobs.md b/docs/api/jobs.md index 48c7097..ffc7176 100644 --- a/docs/api/jobs.md +++ b/docs/api/jobs.md @@ -28,6 +28,12 @@ Job submission, monitoring, and management functionality for DesignSafe computat .. autofunction:: dapi.jobs.interpret_job_status ``` +## Listing Jobs + +```{eval-rst} +.. autofunction:: dapi.jobs.list_jobs +``` + ## SubmittedJob Class ```{eval-rst} diff --git a/docs/authentication.md b/docs/authentication.md index b85a9ae..8dd9611 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -32,7 +32,7 @@ Then initialize the client: from dapi import DSClient # Automatically uses environment variables -client = DSClient() +ds = DSClient() ``` #### Persistent Environment Variables @@ -65,10 +65,10 @@ Initialize the client: from dapi import DSClient # Automatically loads from .env file -client = DSClient() +ds = DSClient() # Or specify a custom .env file path -client = DSClient(env_file="path/to/custom.env") +ds = DSClient(env_file="path/to/custom.env") ``` ### Method 3: Interactive Prompts @@ -78,7 +78,7 @@ If no credentials are found, dapi will prompt you: ```python from dapi import DSClient -client = DSClient() +ds = DSClient() # Output: # Enter DesignSafe Username: your_username # Enter DesignSafe Password: [hidden input] @@ -92,7 +92,7 @@ Pass 
credentials directly (not recommended for production): ```python from dapi import DSClient -client = DSClient( +ds = DSClient( username="your_username", password="your_password" ) @@ -103,10 +103,10 @@ client = DSClient( ### 1. Use Environment Variables or .env Files ```python # Good - uses environment variables -client = DSClient() +ds = DSClient() # Avoid - credentials in code -client = DSClient(username="user", password="pass") +ds = DSClient(username="user", password="pass") ``` ### 2. Protect Your .env File @@ -138,7 +138,7 @@ os.environ['DESIGNSAFE_USERNAME'] = 'your_username' os.environ['DESIGNSAFE_PASSWORD'] = 'your_password' from dapi import DSClient -client = DSClient() +ds = DSClient() ``` ### Using .env Files in Jupyter @@ -152,7 +152,7 @@ with open('.env', 'w') as f: f.write('DESIGNSAFE_PASSWORD=your_password\n') from dapi import DSClient -client = DSClient() +ds = DSClient() ``` ## Advanced Configuration @@ -162,7 +162,7 @@ client = DSClient() ```python from dapi import DSClient -client = DSClient( +ds = DSClient( base_url="https://designsafe.tapis.io", # Default username="your_username", password="your_password" @@ -175,10 +175,10 @@ client = DSClient( from dapi import DSClient # Development environment -dev_client = DSClient(env_file=".env.development") +dev_ds = DSClient(env_file=".env.development") # Production environment -prod_client = DSClient(env_file=".env.production") +prod_ds = DSClient(env_file=".env.production") ``` ## TMS Credentials (Execution System Access) @@ -194,35 +194,35 @@ TMS credentials only need to be established **once per system**. 
After that, the ```python from dapi import DSClient -client = DSClient() +ds = DSClient() # Establish TMS credentials on execution systems -client.systems.establish_credentials("frontera") -client.systems.establish_credentials("stampede3") -client.systems.establish_credentials("ls6") +ds.systems.establish_credentials("frontera") +ds.systems.establish_credentials("stampede3") +ds.systems.establish_credentials("ls6") ``` If credentials already exist, `establish_credentials` does nothing (idempotent). To force re-creation: ```python -client.systems.establish_credentials("frontera", force=True) +ds.systems.establish_credentials("frontera", force=True) ``` ### Check Credentials ```python # Check if credentials exist before submitting a job -if client.systems.check_credentials("frontera"): +if ds.systems.check_credentials("frontera"): print("Ready to submit jobs on Frontera") else: - client.systems.establish_credentials("frontera") + ds.systems.establish_credentials("frontera") ``` ### Revoke Credentials ```python # Remove credentials (e.g., to reset keys) -client.systems.revoke_credentials("frontera") +ds.systems.revoke_credentials("frontera") ``` ### Using TMS from Outside DesignSafe @@ -239,12 +239,12 @@ DESIGNSAFE_PASSWORD=your_password from dapi import DSClient # Works from anywhere with network access to designsafe.tapis.io -client = DSClient() -client.systems.establish_credentials("frontera") +ds = DSClient() +ds.systems.establish_credentials("frontera") # Now submit jobs as usual -job_request = client.jobs.generate_request(...) -job = client.jobs.submit_request(job_request) +job_request = ds.jobs.generate_request(...) +job = ds.jobs.submit_request(job_request) ``` ### Troubleshooting TMS @@ -269,11 +269,11 @@ CredentialError: System 'nonexistent' not found. 
from dapi import DSClient try: - client = DSClient() + ds = DSClient() print("Authentication successful!") - + # Test API access - apps = client.apps.find("", verbose=False) + apps = ds.apps.find("", verbose=False) print(f"API access confirmed. Found {len(apps)} apps.") except Exception as e: @@ -285,7 +285,7 @@ except Exception as e: ```python # Test database authentication try: - df = client.db.ngl.read_sql("SELECT COUNT(*) FROM SITE") + df = ds.db.ngl.read_sql("SELECT COUNT(*) FROM SITE") print("Database access confirmed") except Exception as e: print(f"Database access failed: {e}") @@ -374,15 +374,15 @@ with open('.env', 'w') as f: # 2. Initialize client from dapi import DSClient -client = DSClient() +ds = DSClient() # 3. Test authentication print("Testing TAPIS API access...") -apps = client.apps.find("matlab", verbose=False) +apps = ds.apps.find("matlab", verbose=False) print(f"Found {len(apps)} MATLAB apps") print("Testing database access...") -df = client.db.ngl.read_sql("SELECT COUNT(*) FROM SITE") +df = ds.db.ngl.read_sql("SELECT COUNT(*) FROM SITE") print(f"NGL database has {df.iloc[0, 0]} sites") print("All authentication successful!") diff --git a/docs/database.md b/docs/database.md index c4631e5..7372111 100644 --- a/docs/database.md +++ b/docs/database.md @@ -18,14 +18,14 @@ dapi provides access to three major research databases: from dapi import DSClient # Initialize client -client = DSClient() +ds = DSClient() # Query NGL database -df = client.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") +df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") print(df) # Query with parameters -site_data = client.db.ngl.read_sql( +site_data = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_NAME = %s", params=["Amagasaki"] ) @@ -91,18 +91,18 @@ EQ_DB_PORT=3306 ```python from dapi import DSClient -client = DSClient() +ds = DSClient() # Count records in NGL database -count_df = client.db.ngl.read_sql("SELECT COUNT(*) as total_sites FROM SITE") +count_df = 
ds.db.ngl.read_sql("SELECT COUNT(*) as total_sites FROM SITE") print(f"Total sites: {count_df['total_sites'].iloc[0]}") # Get first 10 sites -sites_df = client.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10") +sites_df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10") print(sites_df) # Get site information -site_info = client.db.ngl.read_sql(""" +site_info = ds.db.ngl.read_sql(""" SELECT SITE_NAME, SITE_LAT, SITE_LON, SITE_GEOL FROM SITE WHERE SITE_LAT > 35 @@ -116,20 +116,20 @@ print(site_info) ```python # Query with single parameter site_name = "Amagasaki" -site_data = client.db.ngl.read_sql( +site_data = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_NAME = %s", params=[site_name] ) # Query with multiple parameters min_lat, max_lat = 32.0, 38.0 -california_sites = client.db.ngl.read_sql( +california_sites = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_LAT BETWEEN %s AND %s", params=[min_lat, max_lat] ) # Query with named parameters (dictionary) -region_sites = client.db.ngl.read_sql( +region_sites = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_LAT > %(min_lat)s AND SITE_LON < %(max_lon)s", params={"min_lat": 35.0, "max_lon": -115.0} ) @@ -143,12 +143,12 @@ The NGL database contains comprehensive data on soil liquefaction case histories ```python # Explore database structure -tables_info = client.db.ngl.read_sql("SHOW TABLES") +tables_info = ds.db.ngl.read_sql("SHOW TABLES") print("Available tables:") print(tables_info) # Get table structure -site_structure = client.db.ngl.read_sql("DESCRIBE SITE") +site_structure = ds.db.ngl.read_sql("DESCRIBE SITE") print("SITE table structure:") print(site_structure) ``` @@ -157,7 +157,7 @@ print(site_structure) ```python # Site information -sites = client.db.ngl.read_sql(""" +sites = ds.db.ngl.read_sql(""" SELECT SITE_ID, SITE_NAME, SITE_LAT, SITE_LON, SITE_GEOL FROM SITE WHERE SITE_STAT = 1 -- Active sites only @@ -165,7 +165,7 @@ sites = client.db.ngl.read_sql(""" """) # Sites with liquefaction data 
-liquefaction_sites = client.db.ngl.read_sql(""" +liquefaction_sites = ds.db.ngl.read_sql(""" SELECT DISTINCT s.SITE_NAME, s.SITE_LAT, s.SITE_LON FROM SITE s JOIN RECORD r ON s.SITE_ID = r.SITE_ID @@ -174,7 +174,7 @@ liquefaction_sites = client.db.ngl.read_sql(""" """) # Earthquake events -earthquakes = client.db.ngl.read_sql(""" +earthquakes = ds.db.ngl.read_sql(""" SELECT DISTINCT EVENT_NAME, EVENT_DATE, EVENT_MAG FROM EVENT WHERE EVENT_STAT = 1 @@ -183,7 +183,7 @@ earthquakes = client.db.ngl.read_sql(""" """) # CPT data summary -cpt_summary = client.db.ngl.read_sql(""" +cpt_summary = ds.db.ngl.read_sql(""" SELECT COUNT(*) as total_cpts, AVG(CPT_DEPTH) as avg_depth, @@ -198,7 +198,7 @@ cpt_summary = client.db.ngl.read_sql(""" ```python # Sites with high liquefaction potential -high_risk_sites = client.db.ngl.read_sql(""" +high_risk_sites = ds.db.ngl.read_sql(""" SELECT s.SITE_NAME, s.SITE_LAT, @@ -216,7 +216,7 @@ high_risk_sites = client.db.ngl.read_sql(""" """) # Correlation between soil properties and liquefaction -soil_correlation = client.db.ngl.read_sql(""" +soil_correlation = ds.db.ngl.read_sql(""" SELECT cpt.CPT_FC as fines_content, cpt.CPT_D50 as median_grain_size, @@ -243,7 +243,7 @@ The earthquake recovery database contains data on post-earthquake recovery proce ```python # Recovery milestones -recovery_data = client.db.eq.read_sql(""" +recovery_data = ds.db.eq.read_sql(""" SELECT event_name, recovery_metric, @@ -255,7 +255,7 @@ recovery_data = client.db.eq.read_sql(""" """) # Economic impact analysis -economic_impact = client.db.eq.read_sql(""" +economic_impact = ds.db.eq.read_sql(""" SELECT region, AVG(economic_loss_millions) as avg_loss, @@ -275,7 +275,7 @@ The VP database contains model validation data and benchmarks. 
```python # Model performance metrics -model_performance = client.db.vp.read_sql(""" +model_performance = ds.db.vp.read_sql(""" SELECT model_name, benchmark_case, @@ -288,7 +288,7 @@ model_performance = client.db.vp.read_sql(""" """) # Benchmark cases -benchmarks = client.db.vp.read_sql(""" +benchmarks = ds.db.vp.read_sql(""" SELECT benchmark_id, benchmark_name, @@ -309,7 +309,7 @@ import pandas as pd import matplotlib.pyplot as plt # Get site data for analysis -sites_df = client.db.ngl.read_sql(""" +sites_df = ds.db.ngl.read_sql(""" SELECT SITE_LAT, SITE_LON, SITE_GEOL FROM SITE WHERE SITE_STAT = 1 AND SITE_LAT IS NOT NULL @@ -332,7 +332,7 @@ sites_df.to_csv("ngl_sites.csv", index=False) ```python # Earthquake timeline -earthquake_timeline = client.db.ngl.read_sql(""" +earthquake_timeline = ds.db.ngl.read_sql(""" SELECT EVENT_DATE, EVENT_NAME, @@ -364,7 +364,7 @@ print(decade_summary) ```python # Sites by geographic region -regional_analysis = client.db.ngl.read_sql(""" +regional_analysis = ds.db.ngl.read_sql(""" SELECT CASE WHEN SITE_LAT > 40 THEN 'Northern' @@ -395,7 +395,7 @@ print(regional_analysis) ```python # Access database connection directly -ngl_db = client.db.ngl +ngl_db = ds.db.ngl # Check connection status try: @@ -421,7 +421,7 @@ queries = [ ] for query in queries: - result = client.db.ngl.read_sql(query) + result = ds.db.ngl.read_sql(query) print(f"{query}: {result.iloc[0, 0]}") ``` @@ -431,7 +431,7 @@ for query in queries: ```python try: - df = client.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") + df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") print("Query successful") except Exception as e: print(f"Database error: {e}") @@ -452,7 +452,7 @@ except Exception as e: ```python try: # Intentionally bad query - df = client.db.ngl.read_sql("SELECT * FROM NONEXISTENT_TABLE") + df = ds.db.ngl.read_sql("SELECT * FROM NONEXISTENT_TABLE") except Exception as e: print(f"SQL Error: {e}") @@ -468,13 +468,13 @@ except Exception as e: ### 1. 
Use Parameterized Queries ```python # Good - prevents SQL injection -safe_query = client.db.ngl.read_sql( +safe_query = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_NAME = %s", params=[user_input] ) # Dangerous - vulnerable to SQL injection -dangerous_query = client.db.ngl.read_sql( +dangerous_query = ds.db.ngl.read_sql( f"SELECT * FROM SITE WHERE SITE_NAME = '{user_input}'" ) ``` @@ -482,7 +482,7 @@ dangerous_query = client.db.ngl.read_sql( ### 2. Limit Result Sets ```python # Good - use LIMIT for large tables -limited_query = client.db.ngl.read_sql( +limited_query = ds.db.ngl.read_sql( "SELECT * FROM LARGE_TABLE LIMIT 1000" ) @@ -490,7 +490,7 @@ limited_query = client.db.ngl.read_sql( offset = 0 batch_size = 1000 while True: - batch = client.db.ngl.read_sql( + batch = ds.db.ngl.read_sql( "SELECT * FROM LARGE_TABLE LIMIT %s OFFSET %s", params=[batch_size, offset] ) @@ -503,7 +503,7 @@ while True: ### 3. Efficient Joins ```python # Good - use indexes and appropriate joins -efficient_query = client.db.ngl.read_sql(""" +efficient_query = ds.db.ngl.read_sql(""" SELECT s.SITE_NAME, COUNT(r.RECORD_ID) as record_count FROM SITE s LEFT JOIN RECORD r ON s.SITE_ID = r.SITE_ID @@ -517,7 +517,7 @@ efficient_query = client.db.ngl.read_sql(""" ### 4. 
Data Validation ```python # Good - validate data before analysis -df = client.db.ngl.read_sql("SELECT SITE_LAT, SITE_LON FROM SITE") +df = ds.db.ngl.read_sql("SELECT SITE_LAT, SITE_LON FROM SITE") # Check for missing values missing_coords = df.isnull().sum() @@ -538,7 +538,7 @@ print(f"Valid coordinates: {len(valid_coords)}/{len(df)}") ```python # Query data -df = client.db.ngl.read_sql(""" +df = ds.db.ngl.read_sql(""" SELECT s.SITE_NAME, s.SITE_LAT, s.SITE_LON, e.EVENT_NAME, e.EVENT_MAG FROM SITE s JOIN RECORD r ON s.SITE_ID = r.SITE_ID @@ -573,7 +573,7 @@ from sklearn.preprocessing import StandardScaler from sklearn.model_selection import train_test_split # Get numeric features -features_df = client.db.ngl.read_sql(""" +features_df = ds.db.ngl.read_sql(""" SELECT cpt.CPT_DEPTH, cpt.CPT_QC, diff --git a/docs/examples/database.md b/docs/examples/database.md index daa1371..f2b4d2b 100644 --- a/docs/examples/database.md +++ b/docs/examples/database.md @@ -29,7 +29,7 @@ warnings.filterwarnings('ignore') # Initialize DSClient try: print("Initializing DSClient...") - client = DSClient() + ds = DSClient() print("DSClient initialized successfully") # Test database connectivity @@ -37,20 +37,20 @@ try: # Test NGL database try: - test_ngl = client.db.ngl.read_sql("SELECT COUNT(*) as count FROM SITE") + test_ngl = ds.db.ngl.read_sql("SELECT COUNT(*) as count FROM SITE") print(f"NGL Database: {test_ngl['count'].iloc[0]} sites available") except Exception as e: print(f"NGL Database connection failed: {e}") # Test other databases (if available) try: - test_vp = client.db.vp.read_sql("SELECT COUNT(*) as count FROM information_schema.tables") + test_vp = ds.db.vp.read_sql("SELECT COUNT(*) as count FROM information_schema.tables") print(f"VP Database: Connected successfully") except Exception as e: print(f"VP Database: {e}") try: - test_eq = client.db.eq.read_sql("SELECT COUNT(*) as count FROM information_schema.tables") + test_eq = ds.db.eq.read_sql("SELECT COUNT(*) as count FROM 
information_schema.tables") print(f"EQ Database: Connected successfully") except Exception as e: print(f"EQ Database: {e}") @@ -69,7 +69,7 @@ print("=" * 50) # List all tables try: - tables = client.db.ngl.read_sql("SHOW TABLES") + tables = ds.db.ngl.read_sql("SHOW TABLES") print(f"Available tables ({len(tables)}):") for i, table in enumerate(tables.iloc[:, 0], 1): print(f" {i:2d}. {table}") @@ -80,7 +80,7 @@ try: print(f"\n Key Table Structures:") for table in key_tables: try: - structure = client.db.ngl.read_sql(f"DESCRIBE {table}") + structure = ds.db.ngl.read_sql(f"DESCRIBE {table}") print(f"\n {table} table:") print(f"Columns: {len(structure)}") for _, row in structure.head(5).iterrows(): @@ -105,7 +105,7 @@ print("=" * 45) try: # Site statistics - site_stats = client.db.ngl.read_sql(""" + site_stats = ds.db.ngl.read_sql(""" SELECT COUNT(*) as total_sites, COUNT(DISTINCT SITE_GEOL) as unique_geologies, @@ -125,7 +125,7 @@ try: print(f"Longitude range: {stats['min_longitude']:.2f}° to {stats['max_longitude']:.2f}°") # Record statistics - record_stats = client.db.ngl.read_sql(""" + record_stats = ds.db.ngl.read_sql(""" SELECT COUNT(*) as total_records, COUNT(DISTINCT EVENT_ID) as unique_events, @@ -141,7 +141,7 @@ try: print(f"Sites with records: {rec_stats['sites_with_records']}") # Event statistics - event_stats = client.db.ngl.read_sql(""" + event_stats = ds.db.ngl.read_sql(""" SELECT COUNT(*) as total_events, MIN(EVENT_MAG) as min_magnitude, @@ -173,7 +173,7 @@ print("=" * 40) try: # Sites by country/region (using latitude/longitude boundaries) - geographic_distribution = client.db.ngl.read_sql(""" + geographic_distribution = ds.db.ngl.read_sql(""" SELECT CASE WHEN SITE_LAT > 49 THEN 'Canada' @@ -199,7 +199,7 @@ try: print(f" {row['region']:15}: {row['site_count']:3d} sites (avg: {row['avg_latitude']:6.2f}°, {row['avg_longitude']:7.2f}°)") # California sites analysis (high seismic activity region) - california_sites = client.db.ngl.read_sql(""" + 
california_sites = ds.db.ngl.read_sql(""" SELECT s.SITE_NAME, s.SITE_LAT, @@ -233,7 +233,7 @@ print("=" * 35) try: # Major earthquakes with liquefaction data - major_earthquakes = client.db.ngl.read_sql(""" + major_earthquakes = ds.db.ngl.read_sql(""" SELECT e.EVENT_NAME, e.EVENT_DATE, @@ -260,7 +260,7 @@ try: print(f"{row['EVENT_NAME'][:24]:25} {str(row['EVENT_DATE'])[:10]:12} {row['EVENT_MAG']:4.1f} {row['affected_sites']:6d} {row['total_records']:8d} {row['liquefaction_cases']:6d} ({liq_rate:4.1f}%)") # Magnitude distribution - magnitude_distribution = client.db.ngl.read_sql(""" + magnitude_distribution = ds.db.ngl.read_sql(""" SELECT CASE WHEN EVENT_MAG < 5.0 THEN 'M < 5.0' @@ -295,7 +295,7 @@ print("=" * 30) try: # Liquefaction susceptibility by soil type - soil_liquefaction = client.db.ngl.read_sql(""" + soil_liquefaction = ds.db.ngl.read_sql(""" SELECT s.SITE_GEOL as geology, COUNT(DISTINCT r.RECORD_ID) as total_records, @@ -320,7 +320,7 @@ try: print(f"{row['geology'][:24]:25} {row['total_records']:8d} {row['liquefaction_cases']:10d} {row['liquefaction_rate']:8.1f} {row['avg_magnitude']:8.2f} {row['unique_sites']:6d}") # CPT-based analysis (if CPT data is available) - cpt_liquefaction = client.db.ngl.read_sql(""" + cpt_liquefaction = ds.db.ngl.read_sql(""" SELECT CASE WHEN cpt.CPT_FC < 10 THEN 'Clean Sand (FC < 10%)' @@ -350,7 +350,7 @@ try: print(f"{row['soil_classification']:25} {row['cpt_count']:10d} {row['liquefaction_cases']:10d} {row['liquefaction_rate']:8.1f} {row['avg_tip_resistance']:8.1f} {row['avg_magnitude']:8.2f}") # Magnitude vs liquefaction relationship - magnitude_liquefaction = client.db.ngl.read_sql(""" + magnitude_liquefaction = ds.db.ngl.read_sql(""" SELECT CASE WHEN e.EVENT_MAG < 5.5 THEN 'M < 5.5' @@ -386,7 +386,7 @@ print("=" * 25) try: # Earthquake timeline by decade - temporal_analysis = client.db.ngl.read_sql(""" + temporal_analysis = ds.db.ngl.read_sql(""" SELECT FLOOR(YEAR(e.EVENT_DATE) / 10) * 10 as decade, COUNT(DISTINCT 
e.EVENT_ID) as earthquake_count, @@ -410,7 +410,7 @@ try: print(f"{decade_str:8} {row['earthquake_count']:8d} {row['record_count']:8d} {row['liquefaction_cases']:10d} {row['avg_magnitude']:8.2f} {row['max_magnitude']:8.2f}") # Recent significant events (last 30 years) - recent_events = client.db.ngl.read_sql(""" + recent_events = ds.db.ngl.read_sql(""" SELECT e.EVENT_NAME, e.EVENT_DATE, @@ -448,7 +448,7 @@ print("=" * 35) try: # Correlation between earthquake parameters and liquefaction - correlation_data = client.db.ngl.read_sql(""" + correlation_data = ds.db.ngl.read_sql(""" SELECT e.EVENT_MAG as magnitude, cpt.CPT_DEPTH as depth, @@ -535,7 +535,7 @@ try: timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") # Export site data with coordinates - sites_export = client.db.ngl.read_sql(""" + sites_export = ds.db.ngl.read_sql(""" SELECT s.SITE_ID, s.SITE_NAME, @@ -556,7 +556,7 @@ try: """) # Export earthquake events - events_export = client.db.ngl.read_sql(""" + events_export = ds.db.ngl.read_sql(""" SELECT e.EVENT_ID, e.EVENT_NAME, @@ -575,7 +575,7 @@ try: """) # Export summary statistics - summary_export = client.db.ngl.read_sql(""" + summary_export = ds.db.ngl.read_sql(""" SELECT 'Total Sites' as metric, COUNT(DISTINCT s.SITE_ID) as value @@ -682,7 +682,7 @@ print("=" * 40) # Research Question 1: Distance-dependent liquefaction try: - distance_analysis = client.db.ngl.read_sql(""" + distance_analysis = ds.db.ngl.read_sql(""" SELECT CASE WHEN distance_km < 10 THEN '< 10 km' @@ -732,7 +732,7 @@ except Exception as e: # Research Question 2: Depth-dependent liquefaction susceptibility try: - depth_analysis = client.db.ngl.read_sql(""" + depth_analysis = ds.db.ngl.read_sql(""" SELECT CASE WHEN cpt.CPT_DEPTH < 5 THEN '0-5 m' @@ -780,7 +780,7 @@ try: # 1. Use LIMIT for large datasets print("\n1. 
Using LIMIT for large datasets:") large_query_start = datetime.now() - limited_results = client.db.ngl.read_sql(""" + limited_results = ds.db.ngl.read_sql(""" SELECT s.SITE_NAME, s.SITE_LAT, s.SITE_LON FROM SITE s WHERE s.SITE_STAT = 1 @@ -792,7 +792,7 @@ try: # 2. Use WHERE clauses to filter early print("\n2. Filtering with WHERE clauses:") filtered_query_start = datetime.now() - filtered_results = client.db.ngl.read_sql(""" + filtered_results = ds.db.ngl.read_sql(""" SELECT COUNT(*) as count FROM SITE s WHERE s.SITE_STAT = 1 @@ -807,7 +807,7 @@ try: print("\n3. Parameterized queries (secure):") site_name = "Amagasaki" param_query_start = datetime.now() - param_results = client.db.ngl.read_sql( + param_results = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_NAME = %s AND SITE_STAT = 1", params=[site_name] ) @@ -818,7 +818,7 @@ try: # 4. Efficient aggregation print("\n4. Efficient aggregation:") agg_query_start = datetime.now() - agg_results = client.db.ngl.read_sql(""" + agg_results = ds.db.ngl.read_sql(""" SELECT s.SITE_GEOL, COUNT(*) as site_count, @@ -856,7 +856,7 @@ print("=" * 60) try: # Get final statistics - final_stats = client.db.ngl.read_sql(""" + final_stats = ds.db.ngl.read_sql(""" SELECT (SELECT COUNT(*) FROM SITE WHERE SITE_STAT = 1) as total_sites, (SELECT COUNT(*) FROM EVENT WHERE EVENT_STAT = 1) as total_events, diff --git a/docs/index.md b/docs/index.md index 7e058a0..f90842f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -42,21 +42,21 @@ Get started with dapi in just a few lines: from dapi import DSClient # Initialize client (handles authentication automatically) -client = DSClient() +ds = DSClient() # Submit a job -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="matlab-r2023a", input_dir_uri="/MyData/analysis/input/", script_filename="run_analysis.m" ) -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) # Monitor progress final_status = job.monitor() # 
Query research databases -df = client.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10") +df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10") ``` ## Getting Started diff --git a/docs/jobs.md b/docs/jobs.md index 2a98d2c..e21d036 100644 --- a/docs/jobs.md +++ b/docs/jobs.md @@ -6,12 +6,43 @@ This guide covers everything you need to know about submitting, monitoring, and dapi provides a high-level interface for working with TAPIS v3 jobs on DesignSafe. You can: +- **List** past jobs with filtering and search - **Discover** available applications - **Generate** job requests with automatic parameter mapping - **Submit** jobs to DesignSafe compute resources - **Monitor** job progress with real-time updates - **Manage** job outputs and results +## Listing Jobs + +Browse your job history as a pandas DataFrame with optional filtering. + +```python +from dapi import DSClient + +ds = DSClient() + +# List all recent jobs (default: last 100) +df = ds.jobs.list() +print(df[["name", "uuid", "status", "appId", "created_dt"]]) + +# Filter by application +df = ds.jobs.list(app_id="opensees-mp-s3") + +# Filter by status +df = ds.jobs.list(status="FINISHED") + +# Combine filters and increase limit +df = ds.jobs.list(app_id="matlab-r2023a", status="FAILED", limit=500) + +# Use pandas for further analysis +finished = df[df["status"] == "FINISHED"] +print(f"Finished jobs: {len(finished)}") +print(finished.groupby("appId").size()) +``` + +The returned DataFrame includes formatted datetime columns (`created_dt`, `ended_dt`, `created_date`, etc.) for easy time-based analysis. 
+ ## Application Discovery ### Finding Applications @@ -19,23 +50,23 @@ dapi provides a high-level interface for working with TAPIS v3 jobs on DesignSaf ```python from dapi import DSClient -client = DSClient() +ds = DSClient() # Find all applications -all_apps = client.apps.find("", verbose=False) +all_apps = ds.apps.find("", verbose=False) print(f"Found {len(all_apps)} applications") # Search for specific applications -matlab_apps = client.apps.find("matlab", verbose=True) -opensees_apps = client.apps.find("opensees", verbose=True) -mpm_apps = client.apps.find("mpm", verbose=True) +matlab_apps = ds.apps.find("matlab", verbose=True) +opensees_apps = ds.apps.find("opensees", verbose=True) +mpm_apps = ds.apps.find("mpm", verbose=True) ``` ### Getting Application Details ```python # Get detailed information about an application -app_details = client.apps.get_details("mpm-s3", verbose=True) +app_details = ds.apps.get_details("mpm-s3", verbose=True) print(f"App: {app_details.id}") print(f"Version: {app_details.version}") @@ -62,10 +93,10 @@ print(f"Default Cores: {app_details.jobAttributes.coresPerNode}") ```python # 1. Prepare input directory input_path = "/MyData/analysis/input/" -input_uri = client.files.translate_path_to_uri(input_path, verify_exists=True) +input_uri = ds.files.translate_path_to_uri(input_path, verify_exists=True) # 2. Generate job request -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="matlab-r2023a", input_dir_uri=input_uri, script_filename="run_analysis.m", @@ -74,18 +105,18 @@ job_request = client.jobs.generate_request( ) # 3. 
Submit job -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) print(f"Job submitted: {job.uuid}") ``` ### Advanced Job Configuration ```python -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="mpm.json", - + # Resource requirements max_minutes=120, node_count=2, @@ -126,7 +157,7 @@ job_request = client.jobs.generate_request( ```python # Generate base request -job_request = client.jobs.generate_request(...) +job_request = ds.jobs.generate_request(...) # Modify before submission job_request["name"] = "custom_job_name" @@ -146,7 +177,7 @@ job_request["parameterSet"]["envVariables"].append({ }) # Submit modified request -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) ``` ## Job Monitoring @@ -155,7 +186,7 @@ job = client.jobs.submit_request(job_request) ```python # Submit job -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) # Monitor with progress bars final_status = job.monitor( @@ -164,7 +195,7 @@ final_status = job.monitor( ) # Interpret results -client.jobs.interpret_status(final_status, job.uuid) +ds.jobs.interpret_status(final_status, job.uuid) ``` ### Manual Status Checking @@ -259,7 +290,7 @@ archive_uri = job.archive_uri print(f"Job archive: {archive_uri}") # Use files interface to browse archive -files = client.files.list(archive_uri) +files = ds.files.list(archive_uri) for file in files: print(f"- {file.name}") ``` @@ -294,7 +325,7 @@ job.download_output("results.mat", "/local/path/results.mat") job.download_output("output_data.csv", "/local/analysis/data.csv") # Download using files interface -client.files.download( +ds.files.download( f"{archive_uri}/results.mat", "/local/path/results.mat" ) @@ -325,7 +356,7 @@ The `cancel()` method sends a cancellation request to Tapis. 
Note that: from dapi import SubmittedJob job_uuid = "12345678-1234-1234-1234-123456789abc" -resumed_job = SubmittedJob(client._tapis, job_uuid) +resumed_job = SubmittedJob(ds._tapis, job_uuid) # Continue monitoring final_status = resumed_job.monitor() @@ -336,7 +367,7 @@ final_status = resumed_job.monitor() ```python # Monitor multiple jobs job_uuids = ["uuid1", "uuid2", "uuid3"] -jobs = [SubmittedJob(client._tapis, uuid) for uuid in job_uuids] +jobs = [SubmittedJob(ds._tapis, uuid) for uuid in job_uuids] # Check all statuses for job in jobs: @@ -357,7 +388,7 @@ for job in jobs: ```python # List available queues for a system -frontera_queues = client.systems.list_queues("frontera") +frontera_queues = ds.systems.list_queues("frontera") for queue in frontera_queues: print(f"Queue: {queue.name}") print(f"Max runtime: {queue.maxRequestedTime} minutes") @@ -373,7 +404,7 @@ print(f"Development queue available: {dev_queue_exists}") ```python # Get system information try: - queues = client.systems.list_queues("stampede3") + queues = ds.systems.list_queues("stampede3") print(f"Stampede3 has {len(queues)} available queues") except Exception as e: print(f"Cannot access Stampede3: {e}") @@ -385,7 +416,7 @@ except Exception as e: ```python # Submit multiple jobs with different parameters -base_request = client.jobs.generate_request( +base_request = ds.jobs.generate_request( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="template.json", @@ -419,7 +450,7 @@ for i, params in enumerate(parameters): ]) # Submit job - job = client.jobs.submit_request(job_req) + job = ds.jobs.submit_request(job_req) submitted_jobs.append(job) print(f"Submitted job {i+1}/{len(parameters)}: {job.uuid}") @@ -436,7 +467,7 @@ for i, job in enumerate(submitted_jobs): ```python # Submit jobs with dependencies (manual coordination) # Job 1: Preprocessing -prep_job = client.jobs.submit_request(preprocessing_request) +prep_job = ds.jobs.submit_request(preprocessing_request) prep_status = 
prep_job.monitor() if prep_status == "FINISHED": @@ -449,7 +480,7 @@ if prep_status == "FINISHED": "targetPath": "preprocessed" }) - main_job = client.jobs.submit_request(main_request) + main_job = ds.jobs.submit_request(main_request) main_status = main_job.monitor() if main_status == "FINISHED": @@ -462,7 +493,7 @@ if prep_status == "FINISHED": "targetPath": "results" }) - post_job = client.jobs.submit_request(post_request) + post_job = ds.jobs.submit_request(post_request) final_status = post_job.monitor() print(f"Pipeline complete. Final status: {final_status}") @@ -477,9 +508,9 @@ from dapi import JobSubmissionError, JobMonitorError try: # Job submission - job = client.jobs.submit_request(job_request) + job = ds.jobs.submit_request(job_request) final_status = job.monitor() - + except JobSubmissionError as e: print(f"Job submission failed: {e}") @@ -535,7 +566,7 @@ if final_status == "FAILED": ### 1. Resource Planning ```python # Choose appropriate resources -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="analysis.json", @@ -561,7 +592,7 @@ job_request["tags"] = ["research", "seismic", site_id, method] max_retries = 3 for attempt in range(max_retries): try: - job = client.jobs.submit_request(job_request) + job = ds.jobs.submit_request(job_request) final_status = job.monitor() break except JobSubmissionError as e: diff --git a/docs/quickstart.md b/docs/quickstart.md index 33a3176..95ee7c9 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -16,13 +16,13 @@ Here's a complete example that demonstrates the core dapi functionality: from dapi import DSClient # 1. Initialize client (handles authentication) -client = DSClient() +ds = DSClient() # 2. Find available applications -matlab_apps = client.apps.find("matlab", verbose=True) +matlab_apps = ds.apps.find("matlab", verbose=True) # 3. 
Submit a simple job -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="matlab-r2023a", input_dir_uri="/MyData/analysis/input/", script_filename="run_analysis.m", @@ -31,7 +31,7 @@ job_request = client.jobs.generate_request( ) # 4. Submit and monitor -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) final_status = job.monitor() # 5. Check results @@ -45,7 +45,7 @@ if final_status == "FINISHED": print(f"- {output.name} ({output.type})") # 6. Query research database -df = client.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") +df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") print(df) ``` @@ -57,7 +57,7 @@ print(df) from dapi import DSClient # This will prompt for credentials if not found in environment -client = DSClient() +ds = DSClient() # Output: Authentication successful. ``` @@ -67,7 +67,7 @@ Before submitting jobs, ensure you have TMS credentials on the execution system: ```python # One-time setup per system -- safe to call repeatedly -client.systems.establish_credentials("frontera") +ds.systems.establish_credentials("frontera") # Output: TMS credentials established for user 'myuser' on system 'frontera'. 
# Or if already established: @@ -80,16 +80,16 @@ See the [Authentication Guide](authentication.md#tms-credentials-execution-syste ```python # Find all applications -all_apps = client.apps.find("", verbose=False) +all_apps = ds.apps.find("", verbose=False) print(f"Found {len(all_apps)} total applications") # Find specific applications -mpm_apps = client.apps.find("mpm", verbose=True) -matlab_apps = client.apps.find("matlab", verbose=True) -opensees_apps = client.apps.find("opensees", verbose=True) +mpm_apps = ds.apps.find("mpm", verbose=True) +matlab_apps = ds.apps.find("matlab", verbose=True) +opensees_apps = ds.apps.find("opensees", verbose=True) # Get detailed information about an app -app_details = client.apps.get_details("mpm-s3", verbose=True) +app_details = ds.apps.get_details("mpm-s3", verbose=True) ``` ### Step 3: Prepare Your Input Files @@ -97,11 +97,11 @@ app_details = client.apps.get_details("mpm-s3", verbose=True) ```python # Translate DesignSafe paths to TAPIS URIs input_path = "/MyData/mpm-benchmarks/2d/uniaxial_stress/" -input_uri = client.files.translate_path_to_uri(input_path, verify_exists=True) +input_uri = ds.files.translate_path_to_uri(input_path, verify_exists=True) print(f"Input URI: {input_uri}") # List files in the directory -files = client.files.list(input_uri) +files = ds.files.list(input_uri) for file in files: print(f"- {file.name} ({file.type}, {file.size} bytes)") ``` @@ -110,7 +110,7 @@ for file in files: ```python # Generate a job request with automatic parameter mapping -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="mpm.json", @@ -129,14 +129,14 @@ job_request["tags"] = ["research", "mpm"] ```python # Submit the job -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) print(f"Job submitted: {job.uuid}") # Monitor with real-time progress final_status = job.monitor(interval=15) # Interpret the result 
-client.jobs.interpret_status(final_status, job.uuid) +ds.jobs.interpret_status(final_status, job.uuid) ``` ### Step 6: Access Job Results @@ -166,7 +166,7 @@ if final_status in job.TERMINAL_STATES: ```python # Query NGL database -ngl_data = client.db.ngl.read_sql(""" +ngl_data = ds.db.ngl.read_sql(""" SELECT SITE_NAME, SITE_LAT, SITE_LON FROM SITE WHERE SITE_LAT > 35 @@ -177,7 +177,7 @@ print(ngl_data) # Query with parameters site_name = "Amagasaki" -site_data = client.db.ngl.read_sql( +site_data = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_NAME = %s", params=[site_name] ) @@ -191,7 +191,7 @@ print(site_data) ```python # Submit MATLAB job -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="matlab-r2023a", input_dir_uri="/MyData/matlab/analysis/", script_filename="main.m", @@ -199,7 +199,7 @@ job_request = client.jobs.generate_request( allocation="your_allocation" ) -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) final_status = job.monitor() if final_status == "FINISHED": @@ -211,7 +211,7 @@ if final_status == "FINISHED": ```python # Submit OpenSees job -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( app_id="opensees-express", input_dir_uri="/MyData/opensees/earthquake/", script_filename="earthquake_analysis.tcl", @@ -219,7 +219,7 @@ job_request = client.jobs.generate_request( allocation="your_allocation" ) -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) final_status = job.monitor() ``` @@ -238,7 +238,7 @@ ORDER BY num_records DESC LIMIT 20 """ -df = client.db.ngl.read_sql(query) +df = ds.db.ngl.read_sql(query) print("Sites with most records in California:") print(df) @@ -255,7 +255,7 @@ import os os.environ['DEFAULT_ALLOCATION'] = 'your_tacc_allocation' # Now you can omit allocation in job requests -job_request = client.jobs.generate_request( +job_request = ds.jobs.generate_request( 
app_id="mpm-s3", input_dir_uri=input_uri, script_filename="mpm.json" @@ -297,11 +297,11 @@ from dapi import ( ) try: - client = DSClient() + ds = DSClient() # Try to submit job - job_request = client.jobs.generate_request(...) - job = client.jobs.submit_request(job_request) + job_request = ds.jobs.generate_request(...) + job = ds.jobs.submit_request(job_request) final_status = job.monitor() except AuthenticationError as e: @@ -323,13 +323,13 @@ except Exception as e: ### 1. Always Verify Paths ```python # Good - verify path exists -input_uri = client.files.translate_path_to_uri( - "/MyData/analysis/", +input_uri = ds.files.translate_path_to_uri( + "/MyData/analysis/", verify_exists=True ) # Risk - path might not exist -input_uri = client.files.translate_path_to_uri("/MyData/analysis/") +input_uri = ds.files.translate_path_to_uri("/MyData/analysis/") ``` ### 2. Use Descriptive Job Names @@ -343,7 +343,7 @@ job_request["tags"] = ["earthquake", "site-A", "research"] ### 3. Handle Long-Running Jobs ```python # For long jobs, save job UUID for later monitoring -job = client.jobs.submit_request(job_request) +job = ds.jobs.submit_request(job_request) job_uuid = job.uuid # Save UUID to file or environment @@ -352,7 +352,7 @@ with open("current_job.txt", "w") as f: # Later, resume monitoring from dapi import SubmittedJob -saved_job = SubmittedJob(client._tapis, job_uuid) +saved_job = SubmittedJob(ds._tapis, job_uuid) final_status = saved_job.monitor() ``` diff --git a/tests/jobs/test_list_jobs.py b/tests/jobs/test_list_jobs.py new file mode 100644 index 0000000..a83a34d --- /dev/null +++ b/tests/jobs/test_list_jobs.py @@ -0,0 +1,126 @@ +import unittest +from unittest.mock import Mock, MagicMock + +import pandas as pd +from tapipy.errors import BaseTapyException + +from dapi.jobs import list_jobs +from dapi.exceptions import JobMonitorError + + +def _make_job(uuid, name, status, app_id, app_version="1.0", + created="2025-06-15T10:00:00.000Z", + 
ended="2025-06-15T11:00:00.000Z", + remote_started="2025-06-15T10:05:00.000Z", + last_updated="2025-06-15T11:00:00.000Z"): + """Create a mock TapisResult job object.""" + job = Mock() + job.__dict__ = { + "uuid": uuid, + "name": name, + "status": status, + "appId": app_id, + "appVersion": app_version, + "owner": "testuser", + "created": created, + "ended": ended, + "remoteStarted": remote_started, + "lastUpdated": last_updated, + "execSystemId": "frontera", + "archiveSystemId": "designsafe.storage.default", + "tenant": "designsafe", + } + return job + + +MOCK_JOBS = [ + _make_job("uuid-001", "matlab-run-1", "FINISHED", "matlab-r2023a"), + _make_job("uuid-002", "opensees-run-1", "FINISHED", "opensees-mp-s3"), + _make_job("uuid-003", "matlab-run-2", "FAILED", "matlab-r2023a"), + _make_job("uuid-004", "mpm-run-1", "RUNNING", "mpm-s3", + ended=None, remote_started="2025-06-15T10:10:00.000Z"), +] + + +class TestListJobs(unittest.TestCase): + def setUp(self): + self.t = MagicMock() + self.t.jobs.getJobList.return_value = MOCK_JOBS + + def test_returns_dataframe(self): + df = list_jobs(self.t) + self.assertIsInstance(df, pd.DataFrame) + self.assertEqual(len(df), 4) + + def test_empty_result(self): + self.t.jobs.getJobList.return_value = [] + df = list_jobs(self.t) + self.assertIsInstance(df, pd.DataFrame) + self.assertEqual(len(df), 0) + + def test_filter_by_app_id(self): + df = list_jobs(self.t, app_id="matlab-r2023a") + self.assertEqual(len(df), 2) + self.assertTrue((df["appId"] == "matlab-r2023a").all()) + + def test_filter_by_status(self): + df = list_jobs(self.t, status="FINISHED") + self.assertEqual(len(df), 2) + self.assertTrue((df["status"] == "FINISHED").all()) + + def test_filter_by_status_case_insensitive(self): + df = list_jobs(self.t, status="finished") + self.assertEqual(len(df), 2) + + def test_combined_filters(self): + df = list_jobs(self.t, app_id="matlab-r2023a", status="FAILED") + self.assertEqual(len(df), 1) + self.assertEqual(df.iloc[0]["uuid"], 
"uuid-003") + + def test_datetime_columns_exist(self): + df = list_jobs(self.t) + for col in ["created_dt", "created_date", "ended_dt", "ended_date", + "remoteStarted_dt", "lastUpdated_dt"]: + self.assertIn(col, df.columns) + + def test_datetime_nat_for_missing(self): + df = list_jobs(self.t) + # uuid-004 has ended=None + mpm_row = df[df["uuid"] == "uuid-004"].iloc[0] + self.assertTrue(pd.isna(mpm_row["ended_dt"])) + + def test_priority_column_order(self): + df = list_jobs(self.t) + expected_first = ["name", "uuid", "status", "appId", "appVersion", + "created_dt", "ended_dt"] + actual_first = list(df.columns[:len(expected_first)]) + self.assertEqual(actual_first, expected_first) + + def test_passes_limit_to_api(self): + list_jobs(self.t, limit=50) + self.t.jobs.getJobList.assert_called_once_with( + limit=50, orderBy="created(desc)" + ) + + def test_raises_job_monitor_error_on_api_failure(self): + self.t.jobs.getJobList.side_effect = BaseTapyException("server error") + with self.assertRaises(JobMonitorError): + list_jobs(self.t) + + def test_verbose_prints_count(self): + # Should not raise + df = list_jobs(self.t, verbose=True) + self.assertEqual(len(df), 4) + + def test_index_is_reset(self): + df = list_jobs(self.t, app_id="matlab-r2023a") + self.assertEqual(list(df.index), [0, 1]) + + def test_no_filter_returns_all(self): + df = list_jobs(self.t) + uuids = set(df["uuid"]) + self.assertEqual(uuids, {"uuid-001", "uuid-002", "uuid-003", "uuid-004"}) + + +if __name__ == "__main__": + unittest.main() From a62b99a6ae7f228e366df73249e9a961939e748f Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 09:57:13 -0500 Subject: [PATCH 07/21] Auto-TMS on DSClient init, configurable jobs list output - DSClient() now auto-establishes TMS credentials on TACC systems (frontera, stampede3, ls6). Skips gracefully on errors/no allocation. - ds.jobs.list() supports output="df" (default), "list", or "raw". 
- Simplify auth docs: DB credentials use built-in defaults, no .env needed. - Remove manual TMS step from quickstart (now automatic). - 59 tests pass (8 new for setup_tms, 6 new for output formats). --- dapi/client.py | 20 ++++++-- dapi/jobs.py | 68 ++++++++++++++++++++------- dapi/systems.py | 76 ++++++++++++++++++++++++++++++- docs/authentication.md | 52 +++++---------------- docs/jobs.md | 19 +++++++- docs/quickstart.md | 15 ++---- tests/jobs/test_list_jobs.py | 34 ++++++++++++++ tests/systems/test_credentials.py | 76 +++++++++++++++++++++++++++++++ 8 files changed, 285 insertions(+), 75 deletions(-) diff --git a/dapi/client.py b/dapi/client.py index e227646..2d81681 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -92,6 +92,9 @@ def __init__(self, tapis_client: Optional[Tapis] = None, **auth_kwargs): self.systems = SystemMethods(self.tapis) self.db = DatabaseAccessor() + # Auto-setup TMS credentials on TACC execution systems + systems_module.setup_tms_credentials(self.tapis) + # --- AppMethods and FileMethods remain the same --- class AppMethods: @@ -582,34 +585,41 @@ def list( app_id: Optional[str] = None, status: Optional[str] = None, limit: int = 100, + output: str = "df", verbose: bool = False, ): - """List jobs as a pandas DataFrame with optional filtering. + """List jobs with optional filtering. - Fetches jobs from Tapis ordered by creation date (newest first) - and returns them as a DataFrame. Filters are applied client-side. + Fetches jobs from Tapis ordered by creation date (newest first). + Filters are applied client-side. Args: app_id (str, optional): Filter by application ID. status (str, optional): Filter by job status (e.g., "FINISHED"). Case-insensitive. limit (int, optional): Maximum jobs to fetch. Defaults to 100. + output (str, optional): Output format. "df" for pandas DataFrame + (default), "list" for list of dicts, "raw" for TapisResult + objects. verbose (bool, optional): Print job count. Defaults to False. 
Returns: - pd.DataFrame: Job metadata with formatted datetime columns. + Depends on output: DataFrame, list of dicts, or list of + TapisResult objects. Raises: JobMonitorError: If the Tapis API call fails. Example: >>> df = ds.jobs.list(app_id="matlab-r2023a", status="FINISHED") - >>> print(df[["name", "uuid", "status", "created_dt"]]) + >>> jobs = ds.jobs.list(output="list") + >>> raw = ds.jobs.list(limit=10, output="raw") """ return jobs_module.list_jobs( self._tapis, app_id=app_id, status=status, limit=limit, + output=output, verbose=verbose, ) diff --git a/dapi/jobs.py b/dapi/jobs.py index c312b20..1064790 100644 --- a/dapi/jobs.py +++ b/dapi/jobs.py @@ -1348,9 +1348,10 @@ def list_jobs( app_id: Optional[str] = None, status: Optional[str] = None, limit: int = 100, + output: str = "df", verbose: bool = False, -) -> pd.DataFrame: - """Fetch Tapis jobs and return them as a pandas DataFrame. +): + """Fetch Tapis jobs with optional filtering. Retrieves jobs from Tapis ordered by creation date (newest first) and optionally filters by app ID and/or status. Filters are applied @@ -1362,22 +1363,29 @@ def list_jobs( status: Filter by job status (e.g., "FINISHED", "FAILED"). Case-insensitive. limit: Maximum number of jobs to fetch from Tapis. Defaults to 100. + output: Output format. "df" returns a pandas DataFrame (default), + "list" returns a list of dicts, "raw" returns the raw + TapisResult objects. verbose: If True, prints the number of jobs found. Returns: - DataFrame with job metadata and formatted datetime columns. - Priority columns appear first: name, uuid, status, appId, appVersion, - created_dt, ended_dt. Additional datetime columns include _dt - (timezone-aware) and _date (date only) variants for created, ended, - remoteStarted, and lastUpdated. + Depends on ``output``: + - "df": pandas DataFrame with formatted datetime columns. + - "list": list of dicts with job metadata. + - "raw": list of TapisResult objects as returned by the API. 
Raises: JobMonitorError: If the Tapis API call fails. + ValueError: If output format is not recognized. Example: >>> df = list_jobs(t, app_id="matlab-r2023a", status="FINISHED") - >>> print(df[["name", "uuid", "status", "created_dt"]]) + >>> jobs = list_jobs(t, output="list") + >>> raw = list_jobs(t, limit=10, output="raw") """ + if output not in ("df", "list", "raw"): + raise ValueError(f"output must be 'df', 'list', or 'raw', got '{output}'") + try: jobs_list = tapis_client.jobs.getJobList( limit=limit, @@ -1391,17 +1399,48 @@ def list_jobs( if not jobs_list: if verbose: print("Found 0 jobs.") + if output == "raw": + return [] + if output == "list": + return [] return pd.DataFrame() + # For raw output, apply filters manually on TapisResult objects + if output == "raw": + results = jobs_list + if app_id: + results = [j for j in results if getattr(j, "appId", None) == app_id] + if status: + results = [ + j for j in results + if getattr(j, "status", "").upper() == status.upper() + ] + if verbose: + print(f"Found {len(results)} jobs.") + return results + # Convert TapisResult objects to dicts jobs_dicts = [job.__dict__ for job in jobs_list] - df = pd.DataFrame(jobs_dicts) # Apply client-side filters - if app_id and "appId" in df.columns: - df = df[df["appId"] == app_id] - if status and "status" in df.columns: - df = df[df["status"] == status.upper()] + if app_id: + jobs_dicts = [j for j in jobs_dicts if j.get("appId") == app_id] + if status: + jobs_dicts = [ + j for j in jobs_dicts if j.get("status", "").upper() == status.upper() + ] + + if verbose: + print(f"Found {len(jobs_dicts)} jobs.") + + if output == "list": + return jobs_dicts + + # Build DataFrame + df = pd.DataFrame(jobs_dicts) + + if df.empty: + return df # Add formatted datetime columns time_cols = ["created", "ended", "remoteStarted", "lastUpdated"] @@ -1421,7 +1460,4 @@ def list_jobs( df = df.reset_index(drop=True) - if verbose: - print(f"Found {len(df)} jobs.") - return df diff --git a/dapi/systems.py 
b/dapi/systems.py index a4cd5c4..fcc757f 100644 --- a/dapi/systems.py +++ b/dapi/systems.py @@ -1,7 +1,7 @@ # dapi/systems.py from tapipy.tapis import Tapis from tapipy.errors import BaseTapyException, UnauthorizedError, NotFoundError -from typing import List, Any, Optional +from typing import Dict, List, Any, Optional from .exceptions import SystemInfoError, CredentialError @@ -292,3 +292,77 @@ def revoke_credentials( f"Unexpected error revoking credentials for user '{effective_username}' " f"on system '{system_id}': {e}" ) from e + + +# Default TACC execution systems that use TMS_KEYS +TACC_SYSTEMS = ["frontera", "stampede3", "ls6"] + + +def setup_tms_credentials( + t: Tapis, + systems: Optional[List[str]] = None, +) -> Dict[str, str]: + """Check and establish TMS credentials on execution systems. + + For each system, checks if credentials exist and creates them if missing. + Failures are handled gracefully — a system that can't be reached or where + the user lacks an allocation is skipped with a warning. + + Args: + t: Authenticated Tapis client instance. + systems: List of system IDs to set up. Defaults to TACC_SYSTEMS + (frontera, stampede3, ls6). + + Returns: + Dict mapping system_id to status: "ready", "created", or "skipped". + """ + if systems is None: + systems = TACC_SYSTEMS + + username = getattr(t, "username", None) + if not username: + print("Warning: Could not determine username. 
Skipping TMS setup.") + return {s: "skipped" for s in systems} + + results = {} + + for system_id in systems: + try: + # Check if system uses TMS_KEYS + system_details = t.systems.getSystem(systemId=system_id) + authn_method = getattr(system_details, "defaultAuthnMethod", None) + + if authn_method != "TMS_KEYS": + results[system_id] = "skipped" + continue + + # Check existing credentials + if check_credentials(t, system_id, username): + results[system_id] = "ready" + continue + + # Try to create credentials + t.systems.createUserCredential( + systemId=system_id, + userName=username, + createTmsKeys=True, + ) + results[system_id] = "created" + + except Exception: + results[system_id] = "skipped" + + # Print summary + ready = [s for s, v in results.items() if v in ("ready", "created")] + created = [s for s, v in results.items() if v == "created"] + skipped = [s for s, v in results.items() if v == "skipped"] + + if ready: + msg = f"TMS credentials ready: {', '.join(ready)}" + if created: + msg += f" (newly created: {', '.join(created)})" + print(msg) + if skipped: + print(f"TMS credentials skipped: {', '.join(skipped)}") + + return results diff --git a/docs/authentication.md b/docs/authentication.md index 8dd9611..b89bdc6 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -327,65 +327,37 @@ print(os.access('.env', os.R_OK)) ### Database Connection Issues -For database-specific authentication issues: +Database connections use built-in public read-only credentials by default -- no `.env` setup is required for database access. 
If you need to override the defaults (e.g., for a private database instance), you can set environment variables: -```python -# Check database environment variables -import os -print("NGL_DB_USER:", os.getenv('NGL_DB_USER')) -print("VP_DB_USER:", os.getenv('VP_DB_USER')) -print("EQ_DB_USER:", os.getenv('EQ_DB_USER')) -``` - -Required database environment variables: ```bash -# NGL Database -export NGL_DB_USER="dspublic" -export NGL_DB_PASSWORD="your_password" -export NGL_DB_HOST="db_host" -export NGL_DB_PORT="3306" - -# VP Database -export VP_DB_USER="dspublic" -export VP_DB_PASSWORD="your_password" -export VP_DB_HOST="db_host" -export VP_DB_PORT="3306" - -# Earthquake Recovery Database -export EQ_DB_USER="dspublic" -export EQ_DB_PASSWORD="your_password" -export EQ_DB_HOST="db_host" -export EQ_DB_PORT="3306" +# Optional: override database credentials via .env or environment +NGL_DB_USER=your_user +NGL_DB_PASSWORD=your_password +NGL_DB_HOST=your_host +NGL_DB_PORT=3306 ``` -## Example: Complete Setup +The same pattern applies for VP (`VP_DB_*`) and Earthquake Recovery (`EQ_DB_*`) databases. -Here's a complete example of setting up authentication: +## Example: Complete Setup ```python -# 1. Create .env file +# 1. Create .env file (only Tapis credentials required) with open('.env', 'w') as f: f.write('DESIGNSAFE_USERNAME=your_username\n') f.write('DESIGNSAFE_PASSWORD=your_password\n') - f.write('NGL_DB_USER=dspublic\n') - f.write('NGL_DB_PASSWORD=your_db_password\n') - f.write('NGL_DB_HOST=db_host\n') - f.write('NGL_DB_PORT=3306\n') -# 2. Initialize client +# 2. Initialize client (auto-sets up TMS credentials) from dapi import DSClient ds = DSClient() -# 3. Test authentication -print("Testing TAPIS API access...") +# 3. 
Test apps = ds.apps.find("matlab", verbose=False) print(f"Found {len(apps)} MATLAB apps") -print("Testing database access...") +# Database works out of the box -- no extra credentials needed df = ds.db.ngl.read_sql("SELECT COUNT(*) FROM SITE") print(f"NGL database has {df.iloc[0, 0]} sites") - -print("All authentication successful!") ``` ## Troubleshooting diff --git a/docs/jobs.md b/docs/jobs.md index e21d036..af3b56e 100644 --- a/docs/jobs.md +++ b/docs/jobs.md @@ -22,7 +22,7 @@ from dapi import DSClient ds = DSClient() -# List all recent jobs (default: last 100) +# List all recent jobs (default: last 100, returns DataFrame) df = ds.jobs.list() print(df[["name", "uuid", "status", "appId", "created_dt"]]) @@ -41,7 +41,22 @@ print(f"Finished jobs: {len(finished)}") print(finished.groupby("appId").size()) ``` -The returned DataFrame includes formatted datetime columns (`created_dt`, `ended_dt`, `created_date`, etc.) for easy time-based analysis. +### Output Formats + +By default `list()` returns a pandas DataFrame. Use the `output` parameter for other formats: + +```python +# DataFrame (default) -- includes formatted datetime columns +df = ds.jobs.list() + +# List of dicts -- lightweight, no pandas dependency +jobs = ds.jobs.list(output="list") +for job in jobs: + print(f"{job['name']}: {job['status']}") + +# Raw TapisResult objects -- for advanced Tapis API usage +raw = ds.jobs.list(output="raw") +``` ## Application Discovery diff --git a/docs/quickstart.md b/docs/quickstart.md index 95ee7c9..d73e770 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -61,20 +61,13 @@ ds = DSClient() # Output: Authentication successful. ``` -### Step 1b: Establish TMS Credentials (One-Time) +`DSClient()` automatically sets up TMS credentials on TACC execution systems (Frontera, Stampede3, LS6). 
You'll see a summary like: -Before submitting jobs, ensure you have TMS credentials on the execution system: - -```python -# One-time setup per system -- safe to call repeatedly -ds.systems.establish_credentials("frontera") -# Output: TMS credentials established for user 'myuser' on system 'frontera'. - -# Or if already established: -# Output: Credentials already exist for user 'myuser' on system 'frontera'. No action taken. +``` +TMS credentials ready: frontera, stampede3, ls6 ``` -See the [Authentication Guide](authentication.md#tms-credentials-execution-system-access) for details. +Systems where you don't have an allocation are silently skipped. See the [Authentication Guide](authentication.md#tms-credentials-execution-system-access) for manual control. ### Step 2: Explore Available Applications diff --git a/tests/jobs/test_list_jobs.py b/tests/jobs/test_list_jobs.py index a83a34d..032a993 100644 --- a/tests/jobs/test_list_jobs.py +++ b/tests/jobs/test_list_jobs.py @@ -121,6 +121,40 @@ def test_no_filter_returns_all(self): uuids = set(df["uuid"]) self.assertEqual(uuids, {"uuid-001", "uuid-002", "uuid-003", "uuid-004"}) + # --- output format tests --- + + def test_output_list(self): + result = list_jobs(self.t, output="list") + self.assertIsInstance(result, list) + self.assertEqual(len(result), 4) + self.assertIsInstance(result[0], dict) + self.assertIn("uuid", result[0]) + + def test_output_list_with_filter(self): + result = list_jobs(self.t, app_id="matlab-r2023a", output="list") + self.assertEqual(len(result), 2) + self.assertTrue(all(j["appId"] == "matlab-r2023a" for j in result)) + + def test_output_raw(self): + result = list_jobs(self.t, output="raw") + self.assertIsInstance(result, list) + self.assertEqual(len(result), 4) + # Raw returns the original Mock objects + self.assertNotIsInstance(result[0], dict) + + def test_output_raw_with_filter(self): + result = list_jobs(self.t, status="RUNNING", output="raw") + self.assertEqual(len(result), 1) + + def 
test_output_empty_list(self): + self.t.jobs.getJobList.return_value = [] + self.assertEqual(list_jobs(self.t, output="list"), []) + self.assertEqual(list_jobs(self.t, output="raw"), []) + + def test_invalid_output_raises(self): + with self.assertRaises(ValueError): + list_jobs(self.t, output="xml") + if __name__ == "__main__": unittest.main() diff --git a/tests/systems/test_credentials.py b/tests/systems/test_credentials.py index 2f9a99b..73b523f 100644 --- a/tests/systems/test_credentials.py +++ b/tests/systems/test_credentials.py @@ -7,6 +7,7 @@ check_credentials, establish_credentials, revoke_credentials, + setup_tms_credentials, _resolve_username, ) from dapi.exceptions import CredentialError @@ -219,5 +220,80 @@ def test_raises_credential_error_on_generic_exception(self): revoke_credentials(self.t, "frontera", verbose=False) +class TestSetupTmsCredentials(unittest.TestCase): + def setUp(self): + self.t = MagicMock() + self.t.username = "testuser" + # Default: all systems use TMS_KEYS + self.mock_system = Mock() + self.mock_system.defaultAuthnMethod = "TMS_KEYS" + self.t.systems.getSystem.return_value = self.mock_system + + def test_all_systems_already_ready(self): + # checkUserCredential succeeds = credentials exist + self.t.systems.checkUserCredential.return_value = Mock() + results = setup_tms_credentials(self.t, systems=["frontera", "ls6"]) + self.assertEqual(results, {"frontera": "ready", "ls6": "ready"}) + self.t.systems.createUserCredential.assert_not_called() + + def test_creates_missing_credentials(self): + self.t.systems.checkUserCredential.side_effect = UnauthorizedError() + results = setup_tms_credentials(self.t, systems=["frontera"]) + self.assertEqual(results, {"frontera": "created"}) + self.t.systems.createUserCredential.assert_called_once_with( + systemId="frontera", userName="testuser", createTmsKeys=True + ) + + def test_skips_system_on_error(self): + self.t.systems.getSystem.side_effect = BaseTapyException("down") + results = 
setup_tms_credentials(self.t, systems=["frontera"]) + self.assertEqual(results, {"frontera": "skipped"}) + + def test_skips_non_tms_system(self): + self.mock_system.defaultAuthnMethod = "PASSWORD" + results = setup_tms_credentials(self.t, systems=["cloud-system"]) + self.assertEqual(results, {"cloud-system": "skipped"}) + + def test_mixed_results(self): + # frontera: creds exist, stampede3: needs creation, ls6: system error + def get_system_side_effect(systemId): + if systemId == "ls6": + raise BaseTapyException("not found") + return self.mock_system + + def check_cred_side_effect(systemId, userName): + if systemId == "frontera": + return Mock() + raise UnauthorizedError() + + self.t.systems.getSystem.side_effect = get_system_side_effect + self.t.systems.checkUserCredential.side_effect = check_cred_side_effect + + results = setup_tms_credentials( + self.t, systems=["frontera", "stampede3", "ls6"] + ) + self.assertEqual(results["frontera"], "ready") + self.assertEqual(results["stampede3"], "created") + self.assertEqual(results["ls6"], "skipped") + + def test_no_username_skips_all(self): + self.t.username = None + results = setup_tms_credentials(self.t, systems=["frontera"]) + self.assertEqual(results, {"frontera": "skipped"}) + + def test_uses_default_tacc_systems(self): + self.t.systems.checkUserCredential.return_value = Mock() + results = setup_tms_credentials(self.t) + self.assertIn("frontera", results) + self.assertIn("stampede3", results) + self.assertIn("ls6", results) + + def test_create_failure_skips_gracefully(self): + self.t.systems.checkUserCredential.side_effect = UnauthorizedError() + self.t.systems.createUserCredential.side_effect = BaseTapyException("fail") + results = setup_tms_credentials(self.t, systems=["frontera"]) + self.assertEqual(results, {"frontera": "skipped"}) + + if __name__ == "__main__": unittest.main() From e25a09c89ea1bc6a65e28bad5f152433d827724c Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 10:01:09 -0500 
Subject: [PATCH 08/21] Clarify DB credentials are intentionally public read-only --- dapi/db/db.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dapi/db/db.py b/dapi/db/db.py index 7544ad5..7f005e6 100644 --- a/dapi/db/db.py +++ b/dapi/db/db.py @@ -79,6 +79,8 @@ def __init__(self, dbname="ngl"): config = db_config[dbname] env_prefix = config["env_prefix"] + # Public read-only credentials for DesignSafe research databases. + # Override via environment variables ({PREFIX}DB_USER, etc.) if needed. self.user = os.getenv(f"{env_prefix}DB_USER", "dspublic") self.password = os.getenv(f"{env_prefix}DB_PASSWORD", "R3ad0nlY") self.host = os.getenv(f"{env_prefix}DB_HOST", "129.114.52.174") From bcfdca4e9105710d53d4f98f59a4d9c216ff51d5 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 14:02:16 -0500 Subject: [PATCH 09/21] WIP: parametric sweeps and redo generate and submit --- README.md | 4 +- _toc.yml | 2 + dapi/__init__.py | 4 +- dapi/client.py | 126 ++++++++++++++++-- dapi/exceptions.py | 2 +- dapi/launcher.py | 139 ++++++++++++++++++++ docs/api/index.md | 5 +- docs/api/launcher.md | 17 +++ docs/authentication.md | 4 +- docs/examples.md | 14 ++ docs/examples/mpm.md | 6 +- docs/examples/openfoam.md | 8 +- docs/examples/opensees.md | 6 +- docs/examples/pylauncher.md | 131 +++++++++++++++++++ docs/index.md | 4 +- docs/jobs.md | 75 +++++++++-- docs/quickstart.md | 24 ++-- examples/mpm/mpm-minimal.ipynb | 131 +------------------ examples/mpm/mpm.ipynb | 158 +---------------------- examples/openfoam/openfoam-minimal.ipynb | 149 +-------------------- examples/openfoam/openfoam.ipynb | 157 +--------------------- examples/opensees/OpenSeesMP-dapi.ipynb | 94 +------------- examples/pylauncher_sweep.ipynb | 128 ++++++++++++++++++ myst.yml | 2 + tests/jobs/test_parametric_sweep.py | 150 +++++++++++++++++++++ 25 files changed, 825 insertions(+), 715 deletions(-) create mode 100644 dapi/launcher.py create mode 100644 docs/api/launcher.md create mode 100644 
docs/examples/pylauncher.md create mode 100644 examples/pylauncher_sweep.ipynb create mode 100644 tests/jobs/test_parametric_sweep.py diff --git a/README.md b/README.md index 0b49a89..7d3cd76 100644 --- a/README.md +++ b/README.md @@ -79,14 +79,14 @@ client = DSClient() client.systems.establish_credentials("frontera") # Submit a job -job_request = client.jobs.generate_request( +job_request = client.jobs.generate( app_id="matlab-r2023a", input_dir_uri="/MyData/analysis/input/", script_filename="run_analysis.m", max_minutes=30, allocation="your_allocation" ) -job = client.jobs.submit_request(job_request) +job = client.jobs.submit(job_request) final_status = job.monitor() ``` diff --git a/_toc.yml b/_toc.yml index 4bb1c17..9f8732b 100644 --- a/_toc.yml +++ b/_toc.yml @@ -20,6 +20,7 @@ parts: - file: docs/examples/mpm - file: docs/examples/opensees - file: docs/examples/openfoam + - file: docs/examples/pylauncher - file: docs/examples/tms_credentials - file: docs/examples/database - caption: API Reference @@ -27,6 +28,7 @@ parts: - file: docs/api/index - file: docs/api/client - file: docs/api/jobs + - file: docs/api/launcher - file: docs/api/files - file: docs/api/apps - file: docs/api/systems diff --git a/dapi/__init__.py b/dapi/__init__.py index 04a51d3..72f897f 100644 --- a/dapi/__init__.py +++ b/dapi/__init__.py @@ -35,12 +35,12 @@ >>> files = client.files.list("/MyData/uploads/") >>> # Job submission and monitoring - >>> job_request = client.jobs.generate_request( + >>> job_request = client.jobs.generate( ... app_id="matlab-r2023a", ... input_dir_uri="/MyData/analysis/input/", ... script_filename="run_analysis.m" ... ) - >>> job = client.jobs.submit_request(job_request) + >>> job = client.jobs.submit(job_request) >>> final_status = job.monitor() >>> # Database access diff --git a/dapi/client.py b/dapi/client.py index 2d81681..953e2aa 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -5,6 +5,7 @@ from . import files as files_module from . 
import jobs as jobs_module from . import systems as systems_module +from . import launcher as launcher_module from .db.accessor import DatabaseAccessor # Import only the necessary classes/functions from jobs @@ -366,6 +367,103 @@ def revoke_credentials( ) +class ParametricSweepMethods: + """Interface for PyLauncher parameter sweeps. + + - ``generate`` — preview (``preview=True``) or write sweep files. + - ``submit`` — submit the sweep job to TACC. + """ + + def __init__(self, tapis_client): + self._tapis = tapis_client + + def generate( + self, + base_command: str, + sweep: Dict[str, Any], + directory: str = None, + *, + placeholder_style: str = "token", + debug: str = None, + preview: bool = False, + ): + """Generate PyLauncher sweep files or preview the parameter grid. + + With ``preview=True``, returns a DataFrame of all parameter + combinations — no files are written. + + Otherwise, expands *base_command* into one command per combination + and writes ``runsList.txt`` and ``call_pylauncher.py`` into + *directory*. Returns the list of generated commands. + + Args: + base_command: Command template with placeholders matching sweep keys. + sweep: Mapping of placeholder name to sequence of values. + directory: Directory to write files into (created if needed). + Required when *preview* is ``False``. + placeholder_style: ``"token"`` (default) for bare ``ALPHA``, + or ``"braces"`` for ``{ALPHA}``. + debug: Optional debug string (e.g. ``"host+job"``). + preview: If ``True``, return a DataFrame (dry run). + + Returns: + ``List[str]`` of commands, or ``pandas.DataFrame`` when + *preview* is ``True``. 
+ """ + return launcher_module.generate_sweep( + base_command, sweep, directory, + placeholder_style=placeholder_style, debug=debug, preview=preview, + ) + + def submit( + self, + directory: str, + app_id: str, + allocation: str, + *, + node_count: Optional[int] = None, + cores_per_node: Optional[int] = None, + max_minutes: Optional[int] = None, + queue: Optional[str] = None, + **kwargs, + ): + """Submit a PyLauncher sweep job. + + Translates *directory* to a Tapis URI, builds a job request with + ``call_pylauncher.py`` as the script, and submits it. + + Args: + directory: Path to the input directory containing + ``runsList.txt`` and ``call_pylauncher.py`` + (e.g. ``"/MyData/sweep/"``). + app_id: Tapis application ID (e.g. ``"openseespy-s3"``). + allocation: TACC allocation to charge. + node_count: Number of compute nodes. + cores_per_node: Cores per node. + max_minutes: Maximum runtime in minutes. + queue: Execution queue name. + **kwargs: Additional arguments passed to + ``ds.jobs.generate()``. + + Returns: + SubmittedJob: A job object for monitoring via ``.monitor()``. + """ + input_uri = files_module.get_ds_path_uri(self._tapis, directory) + job_request = jobs_module.generate_job_request( + tapis_client=self._tapis, + app_id=app_id, + input_dir_uri=input_uri, + script_filename="call_pylauncher.py", + node_count=node_count, + cores_per_node=cores_per_node, + max_minutes=max_minutes, + queue=queue, + allocation=allocation, + **kwargs, + ) + return jobs_module.submit_job_request(self._tapis, job_request) + + class JobMethods: """Interface for Tapis job submission, monitoring, and management. @@ -374,6 +472,10 @@ class JobMethods: Args: tapis_client (Tapis): Authenticated Tapis client instance. + + Attributes: + parametric_sweep (ParametricSweepMethods): Interface for PyLauncher + parameter sweep generation. 
""" def __init__(self, tapis_client: Tapis): @@ -383,9 +485,10 @@ def __init__(self, tapis_client: Tapis): tapis_client (Tapis): Authenticated Tapis client instance. """ self._tapis = tapis_client + self.parametric_sweep = ParametricSweepMethods(tapis_client) # Method to generate the request dictionary - def generate_request( + def generate( self, app_id: str, input_dir_uri: str, @@ -456,7 +559,7 @@ def generate_request( JobSubmissionError: If job request generation fails. Example: - >>> job_request = ds.jobs.generate_request( + >>> job_request = ds.jobs.generate( ... app_id="matlab-r2023a", ... input_dir_uri="tapis://designsafe.storage.default/username/input/", ... script_filename="run_analysis.m", @@ -491,27 +594,26 @@ def generate_request( ) # Method to submit the generated request dictionary - def submit_request(self, job_request: Dict[str, Any]) -> SubmittedJob: - """Submit a pre-generated job request dictionary to Tapis. + def submit(self, job_request: Dict[str, Any]) -> SubmittedJob: + """Submit a job request dictionary to Tapis. - This method takes a complete job request dictionary (typically generated - by generate_request) and submits it to Tapis for execution. + Takes a job request dictionary (typically from ``generate()``) and + submits it to Tapis for execution. Args: - job_request (Dict[str, Any]): Complete job request dictionary containing - all necessary job parameters and configuration. + job_request (Dict[str, Any]): Complete job request dictionary. Returns: SubmittedJob: A SubmittedJob object for monitoring and managing the job. Raises: ValueError: If job_request is not a dictionary. - JobSubmissionError: If the Tapis submission fails or encounters an error. + JobSubmissionError: If the Tapis submission fails. Example: - >>> job_request = ds.jobs.generate_request(...) - >>> submitted_job = ds.jobs.submit_request(job_request) - >>> print(f"Job submitted with UUID: {submitted_job.uuid}") + >>> job_request = ds.jobs.generate(...) 
+ >>> job = ds.jobs.submit(job_request) + >>> print(f"Job submitted with UUID: {job.uuid}") """ return jobs_module.submit_job_request(self._tapis, job_request) diff --git a/dapi/exceptions.py b/dapi/exceptions.py index fa66368..28446c6 100644 --- a/dapi/exceptions.py +++ b/dapi/exceptions.py @@ -155,7 +155,7 @@ class JobSubmissionError(DapiException): Example: >>> try: - ... job = client.jobs.submit_request(invalid_job_request) + ... job = client.jobs.submit(invalid_job_request) ... except JobSubmissionError as e: ... print(f"Job submission failed: {e}") ... if e.response: diff --git a/dapi/launcher.py b/dapi/launcher.py new file mode 100644 index 0000000..8ffe175 --- /dev/null +++ b/dapi/launcher.py @@ -0,0 +1,139 @@ +"""PyLauncher parameter sweep utilities for DesignSafe. + +This module provides functions for generating parameter sweeps and writing +PyLauncher input files. These are pure local operations — PyLauncher itself +runs on TACC compute nodes, not locally. + +Functions: + generate_sweep: Generate sweep commands and optionally write PyLauncher input files. 
+""" + +from __future__ import annotations + +from itertools import product +from pathlib import Path +from typing import Any, List, Mapping, Sequence, Union + +import pandas as pd + + +def _validate_sweep(sweep: Mapping[str, Sequence[Any]]) -> None: + """Validate sweep values are non-empty, non-string sequences.""" + for k, vals in sweep.items(): + if not isinstance(vals, Sequence) or isinstance(vals, (str, bytes)): + raise TypeError(f"sweep[{k!r}] must be a non-string sequence of values.") + if len(vals) == 0: + raise ValueError(f"sweep[{k!r}] is empty; provide at least one value.") + + +def _expand_commands( + base_command: str, + sweep: Mapping[str, Sequence[Any]], + placeholder_style: str, +) -> List[str]: + """Expand a command template into all parameter combinations.""" + if not sweep: + return [base_command] + + _validate_sweep(sweep) + + if placeholder_style not in ("token", "braces"): + raise ValueError("placeholder_style must be 'token' or 'braces'.") + + keys = list(sweep.keys()) + commands: List[str] = [] + for combo in product(*[sweep[k] for k in keys]): + cmd = base_command + for k, v in zip(keys, combo): + if placeholder_style == "token": + cmd = cmd.replace(k, str(v)) + else: + cmd = cmd.replace("{" + k + "}", str(v)) + commands.append(cmd) + + return commands + + +def generate_sweep( + base_command: str, + sweep: Mapping[str, Sequence[Any]], + directory: Union[str, Path, None] = None, + *, + placeholder_style: str = "token", + debug: str | None = None, + preview: bool = False, +) -> Union[List[str], pd.DataFrame]: + """Generate sweep commands and write PyLauncher input files. + + When *preview* is ``True``, returns a DataFrame of all parameter + combinations without writing any files — useful for inspecting the + sweep in a notebook before committing. + + When *preview* is ``False`` (default), expands *base_command* into one + command per parameter combination and writes ``runsList.txt`` and + ``call_pylauncher.py`` into *directory*. 
+ + Args: + base_command: Command template containing placeholders that match + keys in *sweep*. Environment variables like ``$WORK`` or + ``$SLURM_JOB_ID`` are left untouched. + sweep: Mapping of placeholder name to a sequence of values. + Example: ``{"ALPHA": [0.3, 0.5], "BETA": [1, 2]}``. + directory: Directory to write files into. Created if it doesn't + exist. Required when *preview* is ``False``. + placeholder_style: How placeholders appear in *base_command*: + + - ``"token"`` (default): bare tokens, e.g. ``ALPHA`` + - ``"braces"``: brace-wrapped, e.g. ``{ALPHA}`` + + debug: Optional debug string passed to ``ClassicLauncher`` + (e.g. ``"host+job"``). Ignored when *preview* is ``True``. + preview: If ``True``, return a DataFrame of parameter combinations + without writing files. + + Returns: + ``List[str]`` of generated commands when *preview* is ``False``, + or a ``pandas.DataFrame`` of parameter combinations when ``True``. + + Raises: + TypeError: If a sweep value is not a non-string sequence. + ValueError: If a sweep value is empty, *placeholder_style* is + invalid, or *directory* is missing when *preview* is ``False``. 
+ """ + if sweep: + _validate_sweep(sweep) + + if preview: + if not sweep: + return pd.DataFrame() + keys = list(sweep.keys()) + rows = [dict(zip(keys, combo)) for combo in product(*[sweep[k] for k in keys])] + return pd.DataFrame(rows) + + if directory is None: + raise ValueError("directory is required when preview=False.") + + commands = _expand_commands(base_command, sweep, placeholder_style) + + dirpath = Path(directory) + dirpath.mkdir(parents=True, exist_ok=True) + + # Write runsList.txt + (dirpath / "runsList.txt").write_text( + "\n".join(commands) + "\n", encoding="utf-8" + ) + + # Write call_pylauncher.py + if debug is not None: + script = ( + "import pylauncher\n" + f'pylauncher.ClassicLauncher("runsList.txt", debug="{debug}")\n' + ) + else: + script = ( + "import pylauncher\n" + 'pylauncher.ClassicLauncher("runsList.txt")\n' + ) + (dirpath / "call_pylauncher.py").write_text(script, encoding="utf-8") + + return commands diff --git a/docs/api/index.md b/docs/api/index.md index fab0179..836b893 100644 --- a/docs/api/index.md +++ b/docs/api/index.md @@ -11,6 +11,7 @@ The DAPI package is organized into several core modules: ### **Service Modules** - **[Jobs](jobs.md)** - Job submission, monitoring, and management +- **[Launcher](launcher.md)** - PyLauncher parameter sweep utilities - **[Files](files.md)** - File operations and path translation - **[Apps](apps.md)** - Application discovery and details - **[Systems](systems.md)** - System information and queue management @@ -32,13 +33,13 @@ from dapi import DSClient ds = DSClient() # Access different services -ds.jobs.generate_request(...) +ds.jobs.generate(...) ds.files.upload(...) ds.db.ngl.read_sql(...) 
``` ### **Common Operations** -- **Submit Jobs**: `ds.jobs.submit_request(job_dict)` +- **Submit Jobs**: `ds.jobs.submit(job_dict)` - **Monitor Jobs**: `submitted_job.monitor()` - **File Upload**: `ds.files.upload(local_path, remote_uri)` - **File Download**: `ds.files.download(remote_uri, local_path)` diff --git a/docs/api/launcher.md b/docs/api/launcher.md new file mode 100644 index 0000000..0de21d5 --- /dev/null +++ b/docs/api/launcher.md @@ -0,0 +1,17 @@ +# Launcher + +PyLauncher parameter sweep utilities for generating task lists and launcher scripts. + +## Generate Sweep + +```{eval-rst} +.. autofunction:: dapi.launcher.generate_sweep +``` + +## Client Interface + +```{eval-rst} +.. autoclass:: dapi.client.ParametricSweepMethods + :members: + :undoc-members: +``` diff --git a/docs/authentication.md b/docs/authentication.md index b89bdc6..7dc3438 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -243,8 +243,8 @@ ds = DSClient() ds.systems.establish_credentials("frontera") # Now submit jobs as usual -job_request = ds.jobs.generate_request(...) -job = ds.jobs.submit_request(job_request) +job_request = ds.jobs.generate(...) +job = ds.jobs.submit(job_request) ``` ### Troubleshooting TMS diff --git a/docs/examples.md b/docs/examples.md index c629461..b71ed5e 100644 --- a/docs/examples.md +++ b/docs/examples.md @@ -44,6 +44,20 @@ Submit and monitor MPM simulations for large deformation problems. --- +### PyLauncher Parameter Sweeps +Run many independent tasks within a single SLURM allocation using PyLauncher. 
+ +[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/pylauncher_sweep.ipynb) + +**What you'll learn:** +- Generating parameter sweep commands from templates +- Writing PyLauncher task lists and launcher scripts +- Submitting and monitoring sweep jobs + +**[View Full Documentation →](examples/pylauncher.md)** + +--- + ### OpenSees Structural Analysis Perform earthquake engineering simulations with OpenSees. diff --git a/docs/examples/mpm.md b/docs/examples/mpm.md index d0bb3fb..46ac409 100644 --- a/docs/examples/mpm.md +++ b/docs/examples/mpm.md @@ -102,7 +102,7 @@ print(f"Input Directory Tapis URI: {input_uri}") ```python # Generate job request dictionary using app defaults -job_dict = ds.jobs.generate_request( +job_dict = ds.jobs.generate( app_id=app_id_to_use, input_dir_uri=input_uri, script_filename=input_filename, @@ -130,7 +130,7 @@ print(json.dumps(job_dict, indent=2, default=str)) ```python # Extended job configuration options -job_dict = ds.jobs.generate_request( +job_dict = ds.jobs.generate( app_id=app_id_to_use, input_dir_uri=input_uri, script_filename=input_filename, @@ -184,7 +184,7 @@ resources = { ```python # Submit the job to TACC -submitted_job = ds.jobs.submit_request(job_dict) +submitted_job = ds.jobs.submit(job_dict) print(f"Job UUID: {submitted_job.uuid}") ``` diff --git a/docs/examples/openfoam.md b/docs/examples/openfoam.md index f5bc06e..7961f75 100644 --- a/docs/examples/openfoam.md +++ b/docs/examples/openfoam.md @@ -104,7 +104,7 @@ print(f"Input Directory Tapis URI: {input_uri}") ```python # Generate job request dictionary using app defaults -job_dict = ds.jobs.generate_request( +job_dict = ds.jobs.generate( app_id=app_id_to_use, input_dir_uri=input_uri, max_minutes=max_job_minutes, @@ -130,7 +130,7 @@ print(json.dumps(job_dict, indent=2, default=str)) ```python # Extended job configuration options -job_dict 
= ds.jobs.generate_request( +job_dict = ds.jobs.generate( app_id=app_id_to_use, input_dir_uri=input_uri, max_minutes=max_job_minutes, @@ -191,7 +191,7 @@ resources = { ```python # Submit the job to TACC -submitted_job = ds.jobs.submit_request(job_dict) +submitted_job = ds.jobs.submit(job_dict) print(f"Job UUID: {submitted_job.uuid}") ``` @@ -434,7 +434,7 @@ systems = { ```python # Full-featured job request showing all options -complete_job = ds.jobs.generate_request( +complete_job = ds.jobs.generate( # Required parameters app_id="openfoam-stampede3", input_dir_uri=input_uri, diff --git a/docs/examples/opensees.md b/docs/examples/opensees.md index 9611db7..6bbf7f3 100644 --- a/docs/examples/opensees.md +++ b/docs/examples/opensees.md @@ -94,7 +94,7 @@ print(f"Input Directory Tapis URI: {input_uri}") ```python # Generate job request dictionary using app defaults -job_dict = ds.jobs.generate_request( +job_dict = ds.jobs.generate( app_id=app_id, input_dir_uri=input_uri, script_filename=input_filename, @@ -125,7 +125,7 @@ print(json.dumps(job_dict, indent=2, default=str)) ```python # Extended job configuration options -job_dict = ds.jobs.generate_request( +job_dict = ds.jobs.generate( app_id=app_id, input_dir_uri=input_uri, script_filename=input_filename, @@ -175,7 +175,7 @@ Visit [OpenSees userguide on DesignSafe](https://www.designsafe-ci.org/user-guid ```python # Submit job using dapi -submitted_job = ds.jobs.submit_request(job_dict) +submitted_job = ds.jobs.submit(job_dict) print(f"Job launched with UUID: {submitted_job.uuid}") print("Can also check in DesignSafe portal under - Workspace > Tools & Application > Job Status") ``` diff --git a/docs/examples/pylauncher.md b/docs/examples/pylauncher.md new file mode 100644 index 0000000..6257a56 --- /dev/null +++ b/docs/examples/pylauncher.md @@ -0,0 +1,131 @@ +# PyLauncher Parameter Sweeps + +Run many independent tasks within a single SLURM allocation using [PyLauncher](https://github.com/TACC/pylauncher) and dapi's 
parameter sweep utilities. + +[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/pylauncher_sweep.ipynb) + +## When to Use PyLauncher + +- You have many independent serial runs (parameter studies, Monte Carlo, etc.) +- Each run writes to its own output directory +- You want efficient use of multi-core allocations without MPI + +## End-to-End Workflow + +### 1. Define the Parameter Sweep + +```python +from dapi import DSClient + +ds = DSClient() + +sweep = { + "ALPHA": [0.3, 0.5, 3.7], + "BETA": [1.1, 2.0, 3.0], +} +``` + +### 2. Preview (dry run) + +```python +ds.jobs.parametric_sweep.generate( + 'python3 simulate.py --alpha ALPHA --beta BETA', + sweep, + preview=True, +) +``` + +| | ALPHA | BETA | +|---|-------|------| +| 0 | 0.3 | 1.1 | +| 1 | 0.3 | 2.0 | +| ... | ... | ... | +| 8 | 3.7 | 3.0 | + +### 3. Generate Sweep Files + +```python +ds.jobs.parametric_sweep.generate( + 'python3 simulate.py --alpha ALPHA --beta BETA ' + '--output "$WORK/sweep_$SLURM_JOB_ID/run_ALPHA_BETA"', + sweep, + "/home/jupyter/MyData/pylauncher_demo/", + debug="host+job", +) +``` + +### 4. 
Submit + +```python +job = ds.jobs.parametric_sweep.submit( + "/MyData/pylauncher_demo/", + app_id="agnostic", + allocation="your_allocation", + node_count=1, + cores_per_node=48, + max_minutes=30, +) +job.monitor() +``` + +## Placeholder Styles + +Two styles are supported for command templates: + +**Token style** (default) — bare uppercase placeholders: + +```python +"python run.py --mass MASS --length LENGTH" +``` + +**Braces style** — for when token names might collide with other text: + +```python +"python run.py --mass {MASS} --length {LENGTH}" +# pass placeholder_style="braces" +``` + +## OpenSees Example + +A realistic parameter sweep for a cantilever pushover analysis: + +```python +sweep = { + "NODAL_MASS": [4.19, 4.39, 4.59, 4.79, 4.99], + "LCOL": [100, 200, 300], +} + +ds.jobs.parametric_sweep.generate( + "python3 cantilever.py --mass NODAL_MASS --lcol LCOL " + "--outDir out_NODAL_MASS_LCOL", + sweep, + "/home/jupyter/MyData/opensees_sweep/", +) + +job = ds.jobs.parametric_sweep.submit( + "/MyData/opensees_sweep/", + app_id="openseespy-s3", + allocation="your_allocation", + node_count=2, + cores_per_node=48, +) +job.monitor() +``` + +## Output Directory Pattern + +Use TACC environment variables for collision-free output directories: + +``` +$WORK/sweep_$SLURM_JOB_ID/run_ALPHA_BETA +``` + +- `$WORK` — TACC Work filesystem (avoids archiving overhead) +- `$SLURM_JOB_ID` — unique per job submission +- `$LAUNCHER_JID` / `$LAUNCHER_TSK_ID` — unique per PyLauncher task + +## Notes + +- **PyLauncher is NOT a dapi dependency** — it's pre-installed on TACC compute nodes. dapi only generates the input files. +- **MPI is disabled** — PyLauncher's `ClassicLauncher` runs independent serial tasks. The apps used (`agnostic`, `openseespy-s3`) already have `isMpi: false`. +- **Works with any app** — OpenSees, Python, MATLAB, Fortran binaries. The task list is just shell commands. 
diff --git a/docs/index.md b/docs/index.md index f90842f..a91ca7a 100644 --- a/docs/index.md +++ b/docs/index.md @@ -45,12 +45,12 @@ from dapi import DSClient ds = DSClient() # Submit a job -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="matlab-r2023a", input_dir_uri="/MyData/analysis/input/", script_filename="run_analysis.m" ) -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) # Monitor progress final_status = job.monitor() diff --git a/docs/jobs.md b/docs/jobs.md index af3b56e..c63662a 100644 --- a/docs/jobs.md +++ b/docs/jobs.md @@ -111,7 +111,7 @@ input_path = "/MyData/analysis/input/" input_uri = ds.files.translate_path_to_uri(input_path, verify_exists=True) # 2. Generate job request -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="matlab-r2023a", input_dir_uri=input_uri, script_filename="run_analysis.m", @@ -120,14 +120,14 @@ job_request = ds.jobs.generate_request( ) # 3. Submit job -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) print(f"Job submitted: {job.uuid}") ``` ### Advanced Job Configuration ```python -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="mpm.json", @@ -172,7 +172,7 @@ job_request = ds.jobs.generate_request( ```python # Generate base request -job_request = ds.jobs.generate_request(...) +job_request = ds.jobs.generate(...) 
# Modify before submission job_request["name"] = "custom_job_name" @@ -192,7 +192,7 @@ job_request["parameterSet"]["envVariables"].append({ }) # Submit modified request -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) ``` ## Job Monitoring @@ -201,7 +201,7 @@ job = ds.jobs.submit_request(job_request) ```python # Submit job -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) # Monitor with progress bars final_status = job.monitor( @@ -425,13 +425,60 @@ except Exception as e: print(f"Cannot access Stampede3: {e}") ``` +## Parameter Sweeps with PyLauncher + +[PyLauncher](https://github.com/TACC/pylauncher) runs many independent tasks within a single SLURM allocation — ideal for parameter studies on DesignSafe. dapi provides built-in support for generating sweep commands, task lists, and launcher scripts. + +### Quick Example + +```python +from dapi import DSClient + +ds = DSClient() + +# Define parameter sweep +sweep = { + "ALPHA": [0.3, 0.5, 3.7], + "BETA": [1.1, 2.0, 3.0], +} + +# Preview (dry run) +ds.jobs.parametric_sweep.generate( + 'python3 simulate.py --alpha ALPHA --beta BETA', + sweep, + preview=True, +) + +# Generate sweep files +ds.jobs.parametric_sweep.generate( + 'python3 simulate.py --alpha ALPHA --beta BETA ' + '--output "$WORK/sweep_$SLURM_JOB_ID/run_ALPHA_BETA"', + sweep, + "/home/jupyter/MyData/sweep_demo/", + debug="host+job", +) + +# Submit the job +job = ds.jobs.parametric_sweep.submit( + "/MyData/sweep_demo/", + app_id="agnostic", + allocation="your_allocation", + node_count=1, + cores_per_node=48, + max_minutes=30, +) +job.monitor() +``` + +For a full walkthrough with OpenSees, see the **[PyLauncher example](examples/pylauncher.md)**. 
+ ## Advanced Patterns ### Parametric Studies ```python # Submit multiple jobs with different parameters -base_request = ds.jobs.generate_request( +base_request = ds.jobs.generate( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="template.json", @@ -465,7 +512,7 @@ for i, params in enumerate(parameters): ]) # Submit job - job = ds.jobs.submit_request(job_req) + job = ds.jobs.submit(job_req) submitted_jobs.append(job) print(f"Submitted job {i+1}/{len(parameters)}: {job.uuid}") @@ -482,7 +529,7 @@ for i, job in enumerate(submitted_jobs): ```python # Submit jobs with dependencies (manual coordination) # Job 1: Preprocessing -prep_job = ds.jobs.submit_request(preprocessing_request) +prep_job = ds.jobs.submit(preprocessing_request) prep_status = prep_job.monitor() if prep_status == "FINISHED": @@ -495,7 +542,7 @@ if prep_status == "FINISHED": "targetPath": "preprocessed" }) - main_job = ds.jobs.submit_request(main_request) + main_job = ds.jobs.submit(main_request) main_status = main_job.monitor() if main_status == "FINISHED": @@ -508,7 +555,7 @@ if prep_status == "FINISHED": "targetPath": "results" }) - post_job = ds.jobs.submit_request(post_request) + post_job = ds.jobs.submit(post_request) final_status = post_job.monitor() print(f"Pipeline complete. Final status: {final_status}") @@ -523,7 +570,7 @@ from dapi import JobSubmissionError, JobMonitorError try: # Job submission - job = ds.jobs.submit_request(job_request) + job = ds.jobs.submit(job_request) final_status = job.monitor() except JobSubmissionError as e: @@ -581,7 +628,7 @@ if final_status == "FAILED": ### 1. 
Resource Planning ```python # Choose appropriate resources -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="analysis.json", @@ -607,7 +654,7 @@ job_request["tags"] = ["research", "seismic", site_id, method] max_retries = 3 for attempt in range(max_retries): try: - job = ds.jobs.submit_request(job_request) + job = ds.jobs.submit(job_request) final_status = job.monitor() break except JobSubmissionError as e: diff --git a/docs/quickstart.md b/docs/quickstart.md index d73e770..71cdef3 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -22,7 +22,7 @@ ds = DSClient() matlab_apps = ds.apps.find("matlab", verbose=True) # 3. Submit a simple job -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="matlab-r2023a", input_dir_uri="/MyData/analysis/input/", script_filename="run_analysis.m", @@ -31,7 +31,7 @@ job_request = ds.jobs.generate_request( ) # 4. Submit and monitor -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) final_status = job.monitor() # 5. 
Check results @@ -103,7 +103,7 @@ for file in files: ```python # Generate a job request with automatic parameter mapping -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="mpm.json", @@ -122,7 +122,7 @@ job_request["tags"] = ["research", "mpm"] ```python # Submit the job -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) print(f"Job submitted: {job.uuid}") # Monitor with real-time progress @@ -184,7 +184,7 @@ print(site_data) ```python # Submit MATLAB job -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="matlab-r2023a", input_dir_uri="/MyData/matlab/analysis/", script_filename="main.m", @@ -192,7 +192,7 @@ job_request = ds.jobs.generate_request( allocation="your_allocation" ) -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) final_status = job.monitor() if final_status == "FINISHED": @@ -204,7 +204,7 @@ if final_status == "FINISHED": ```python # Submit OpenSees job -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="opensees-express", input_dir_uri="/MyData/opensees/earthquake/", script_filename="earthquake_analysis.tcl", @@ -212,7 +212,7 @@ job_request = ds.jobs.generate_request( allocation="your_allocation" ) -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) final_status = job.monitor() ``` @@ -248,7 +248,7 @@ import os os.environ['DEFAULT_ALLOCATION'] = 'your_tacc_allocation' # Now you can omit allocation in job requests -job_request = ds.jobs.generate_request( +job_request = ds.jobs.generate( app_id="mpm-s3", input_dir_uri=input_uri, script_filename="mpm.json" @@ -293,8 +293,8 @@ try: ds = DSClient() # Try to submit job - job_request = ds.jobs.generate_request(...) - job = ds.jobs.submit_request(job_request) + job_request = ds.jobs.generate(...) 
+ job = ds.jobs.submit(job_request) final_status = job.monitor() except AuthenticationError as e: @@ -336,7 +336,7 @@ job_request["tags"] = ["earthquake", "site-A", "research"] ### 3. Handle Long-Running Jobs ```python # For long jobs, save job UUID for later monitoring -job = ds.jobs.submit_request(job_request) +job = ds.jobs.submit(job_request) job_uuid = job.uuid # Save UUID to file or environment diff --git a/examples/mpm/mpm-minimal.ipynb b/examples/mpm/mpm-minimal.ipynb index cd7d8f3..d691233 100644 --- a/examples/mpm/mpm-minimal.ipynb +++ b/examples/mpm/mpm-minimal.ipynb @@ -154,73 +154,11 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "6257d31a", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Generating job request for app 'mpm-s3'...\n", - "Using App Details: mpm-s3 v1.0\n", - "Found exact match for input parameter: 'Input Directory'\n", - "Placing script 'mpm.json' in appArgs: 'Input Script'\n", - "Adding/Updating TACC allocation: ASC25049\n", - "Job request dictionary generated successfully.\n", - "{\n", - " \"name\": \"mpm-s3-20250609_214141\",\n", - " \"appId\": \"mpm-s3\",\n", - " \"appVersion\": \"1.0\",\n", - " \"description\": \"Material Point Method (MPM) is a particle based method that represents the material as a collection of material points, and their deformations are determined by Newton\\u2019s laws of motion.\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": \"stampede3\",\n", - " \"archiveSystemDir\": \"HOST_EVAL($WORK)/tapis-jobs-archive/${JobCreateDate}/${JobName}-${JobUUID}\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 48,\n", - " \"maxMinutes\": 10,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Input Directory\",\n", - " \"sourceUrl\": 
\"tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"appArgs\": [\n", - " {\n", - " \"name\": \"Input Script\",\n", - " \"arg\": \"mpm.json\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n" - ] - } - ], - "source": [ - "# Generate job request dictionary using app defaults\n", - "job_dict = ds.jobs.generate_request(\n", - " app_id=app_id_to_use,\n", - " input_dir_uri=input_uri,\n", - " script_filename=input_filename,\n", - " max_minutes=max_job_minutes,\n", - " allocation=tacc_allocation,\n", - ")\n", - "print(json.dumps(job_dict, indent=2, default=str))" - ] + "outputs": [], + "source": "# Generate job request dictionary using app defaults\njob_dict = ds.jobs.generate(\n app_id=app_id_to_use,\n input_dir_uri=input_uri,\n script_filename=input_filename,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n)\nprint(json.dumps(job_dict, indent=2, default=str))" }, { "cell_type": "code", @@ -283,66 +221,11 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "8e04a5ef", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "--- Submitting Tapis Job Request ---\n", - "{\n", - " \"name\": \"mpm-s3-20250609_214141\",\n", - " \"appId\": \"mpm-s3\",\n", - " \"appVersion\": \"1.0\",\n", - " \"description\": \"Material Point Method (MPM) is a particle based method that represents the material as a collection of material points, and their deformations are determined by Newton\\u2019s laws of motion.\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": \"stampede3\",\n", - " \"archiveSystemDir\": \"HOST_EVAL($WORK)/tapis-jobs-archive/${JobCreateDate}/${JobName}-${JobUUID}\",\n", - 
" \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 1,\n", - " \"maxMinutes\": 10,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Input Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"appArgs\": [\n", - " {\n", - " \"name\": \"Input Script\",\n", - " \"arg\": \"mpm.json\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n", - "------------------------------------\n", - "Job submitted successfully. UUID: 96f1a204-b223-40dc-93ef-98e92046dba0-007\n", - "Job UUID: 96f1a204-b223-40dc-93ef-98e92046dba0-007\n" - ] - } - ], - "source": [ - "# Submit the job to TACC\n", - "submitted_job = ds.jobs.submit_request(job_dict)\n", - "print(f\"Job UUID: {submitted_job.uuid}\")" - ] + "outputs": [], + "source": "# Submit the job to TACC\nsubmitted_job = ds.jobs.submit(job_dict)\nprint(f\"Job UUID: {submitted_job.uuid}\")" }, { "cell_type": "code", @@ -626,4 +509,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/examples/mpm/mpm.ipynb b/examples/mpm/mpm.ipynb index 6281265..c1c9dd3 100644 --- a/examples/mpm/mpm.ipynb +++ b/examples/mpm/mpm.ipynb @@ -216,83 +216,8 @@ "execution_count": null, "id": "6257d31a", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Generating job request dictionary...\n", - "Generating job request for app 'mpm-s3'...\n", - "Using App Details: mpm-s3 v1.0\n", - "Placing script 'mpm.json' in appArgs: 'Input Script'\n", - "Adding allocation: ASC25049\n", - "Job request dictionary 
generated successfully.\n", - "\n", - "--- Generated Job Request Dictionary ---\n", - "{\n", - " \"name\": \"mpm-s3-20250604_081741\",\n", - " \"appId\": \"mpm-s3\",\n", - " \"appVersion\": \"1.0\",\n", - " \"description\": \"Material Point Method (MPM) is a particle based method that represents the material as a collection of material points, and their deformations are determined by Newton\\u2019s laws of motion.\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": \"stampede3\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 48,\n", - " \"maxMinutes\": 10,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Input Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"appArgs\": [\n", - " {\n", - " \"name\": \"Input Script\",\n", - " \"arg\": \"mpm.json\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n", - "---------------------------------------\n" - ] - } - ], - "source": [ - "try:\n", - " print(\"\\nGenerating job request dictionary...\")\n", - " job_dict = ds.jobs.generate_request(\n", - " app_id=app_id_to_use,\n", - " input_dir_uri=input_uri,\n", - " script_filename=input_filename,\n", - " max_minutes=max_job_minutes,\n", - " allocation=tacc_allocation,\n", - " # queue=queue, # Uncomment if you want to specify a queue\n", - " )\n", - " print(\"\\n--- Generated Job Request Dictionary ---\")\n", - " print(json.dumps(job_dict, indent=2, default=str))\n", - " print(\"---------------------------------------\")\n", - "except (AppDiscoveryError, ValueError, 
JobSubmissionError) as e:\n", - " print(f\"Error generating job request: {e}\")\n", - " raise SystemExit(\"Stopping notebook due to job request generation error.\")\n", - "except Exception as e:\n", - " print(f\"An unexpected error occurred during job request generation: {e}\")\n", - " raise SystemExit(\"Stopping notebook due to unexpected generation error.\")" - ] + "outputs": [], + "source": "try:\n print(\"\\nGenerating job request dictionary...\")\n job_dict = ds.jobs.generate(\n app_id=app_id_to_use,\n input_dir_uri=input_uri,\n script_filename=input_filename,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n # queue=queue, # Uncomment if you want to specify a queue\n )\n print(\"\\n--- Generated Job Request Dictionary ---\")\n print(json.dumps(job_dict, indent=2, default=str))\n print(\"---------------------------------------\")\nexcept (AppDiscoveryError, ValueError, JobSubmissionError) as e:\n print(f\"Error generating job request: {e}\")\n raise SystemExit(\"Stopping notebook due to job request generation error.\")\nexcept Exception as e:\n print(f\"An unexpected error occurred during job request generation: {e}\")\n raise SystemExit(\"Stopping notebook due to unexpected generation error.\")" }, { "cell_type": "code", @@ -359,82 +284,11 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "8e04a5ef", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Submitting the job request dictionary...\n", - "\n", - "--- Submitting Tapis Job Request ---\n", - "{\n", - " \"name\": \"mpm-s3-20250604_081741\",\n", - " \"appId\": \"mpm-s3\",\n", - " \"appVersion\": \"1.0\",\n", - " \"description\": \"Material Point Method (MPM) is a particle based method that represents the material as a collection of material points, and their deformations are determined by Newton\\u2019s laws of motion.\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": 
\"stampede3\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 1,\n", - " \"maxMinutes\": 10,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Input Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"appArgs\": [\n", - " {\n", - " \"name\": \"Input Script\",\n", - " \"arg\": \"mpm.json\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n", - "------------------------------------\n", - "Job submitted successfully. UUID: 52f48eaf-b7d6-4964-a97b-a4b32a6aaeb3-007\n", - "Job Submitted Successfully!\n", - "Job UUID: 52f48eaf-b7d6-4964-a97b-a4b32a6aaeb3-007\n" - ] - } - ], - "source": [ - "if \"job_dict\" not in locals():\n", - " print(\"Error: job_dict not found.\")\n", - " raise SystemExit(\"Stopping notebook.\")\n", - "try:\n", - " print(\"\\nSubmitting the job request dictionary...\")\n", - " submitted_job = ds.jobs.submit_request(job_dict)\n", - " print(f\"Job Submitted Successfully!\")\n", - " print(f\"Job UUID: {submitted_job.uuid}\")\n", - "except JobSubmissionError as e:\n", - " print(f\"Job submission failed: {e}\")\n", - " print(\"\\n--- Failed Job Request ---\")\n", - " print(json.dumps(job_dict, indent=2, default=str))\n", - " print(\"--------------------------\")\n", - " raise SystemExit(\"Stopping notebook due to job submission error.\")\n", - "except Exception as e:\n", - " print(f\"An unexpected error occurred during job submission: {e}\")\n", - " raise SystemExit(\"Stopping notebook due to unexpected submission error.\")" - ] + "outputs": [], + "source": "if 
\"job_dict\" not in locals():\n print(\"Error: job_dict not found.\")\n raise SystemExit(\"Stopping notebook.\")\ntry:\n print(\"\\nSubmitting the job request dictionary...\")\n submitted_job = ds.jobs.submit(job_dict)\n print(f\"Job Submitted Successfully!\")\n print(f\"Job UUID: {submitted_job.uuid}\")\nexcept JobSubmissionError as e:\n print(f\"Job submission failed: {e}\")\n print(\"\\n--- Failed Job Request ---\")\n print(json.dumps(job_dict, indent=2, default=str))\n print(\"--------------------------\")\n raise SystemExit(\"Stopping notebook due to job submission error.\")\nexcept Exception as e:\n print(f\"An unexpected error occurred during job submission: {e}\")\n raise SystemExit(\"Stopping notebook due to unexpected submission error.\")" }, { "cell_type": "code", @@ -1149,4 +1003,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/examples/openfoam/openfoam-minimal.ipynb b/examples/openfoam/openfoam-minimal.ipynb index c27735c..bc59e21 100644 --- a/examples/openfoam/openfoam-minimal.ipynb +++ b/examples/openfoam/openfoam-minimal.ipynb @@ -132,82 +132,10 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Generating job request for app 'openfoam-stampede3'...\n", - "Using App Details: openfoam-stampede3 v12.0.0\n", - "Found exact match for input parameter: 'Case Directory'\n", - "script_filename is None, skipping script parameter placement.\n", - "Adding/Updating TACC allocation: ASC25049\n", - "Job request dictionary generated successfully.\n", - "{\n", - " \"name\": \"openfoam-stampede3-20250609_190344\",\n", - " \"appId\": \"openfoam-stampede3\",\n", - " \"appVersion\": \"12.0.0\",\n", - " \"description\": \"OpenFOAM is free, open source software for computational fluid dynamics (CFD).\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": 
\"designsafe.storage.default\",\n", - " \"archiveSystemDir\": \"${EffectiveUserId}/tapis-jobs-archive/${JobCreateDate}/${JobUUID}\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 48,\n", - " \"maxMinutes\": 10,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Case Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"envVariables\": [\n", - " {\n", - " \"key\": \"mesh\",\n", - " \"value\": \"On\"\n", - " },\n", - " {\n", - " \"key\": \"solver\",\n", - " \"value\": \"pisoFoam\"\n", - " },\n", - " {\n", - " \"key\": \"decomp\",\n", - " \"value\": \"On\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n" - ] - } - ], - "source": [ - "# Generate job request dictionary using app defaults\n", - "job_dict = ds.jobs.generate_request(\n", - " app_id=app_id_to_use,\n", - " input_dir_uri=input_uri,\n", - " max_minutes=max_job_minutes,\n", - " allocation=tacc_allocation,\n", - " archive_system=\"designsafe\",\n", - " extra_env_vars=openfoam_env_vars,\n", - " input_dir_param_name=\"Case Directory\", # OpenFOAM apps use \"Case Directory\" instead of \"Input Directory\"\n", - ")\n", - "print(json.dumps(job_dict, indent=2, default=str))" - ] + "outputs": [], + "source": "# Generate job request dictionary using app defaults\njob_dict = ds.jobs.generate(\n app_id=app_id_to_use,\n input_dir_uri=input_uri,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n archive_system=\"designsafe\",\n extra_env_vars=openfoam_env_vars,\n input_dir_param_name=\"Case Directory\", # 
OpenFOAM apps use \"Case Directory\" instead of \"Input Directory\"\n)\nprint(json.dumps(job_dict, indent=2, default=str))" }, { "cell_type": "code", @@ -277,73 +205,10 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "--- Submitting Tapis Job Request ---\n", - "{\n", - " \"name\": \"openfoam-stampede3-20250609_190344\",\n", - " \"appId\": \"openfoam-stampede3\",\n", - " \"appVersion\": \"12.0.0\",\n", - " \"description\": \"OpenFOAM is free, open source software for computational fluid dynamics (CFD).\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": \"designsafe.storage.default\",\n", - " \"archiveSystemDir\": \"${EffectiveUserId}/tapis-jobs-archive/${JobCreateDate}/${JobUUID}\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 2,\n", - " \"maxMinutes\": 10,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Case Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"envVariables\": [\n", - " {\n", - " \"key\": \"mesh\",\n", - " \"value\": \"On\"\n", - " },\n", - " {\n", - " \"key\": \"solver\",\n", - " \"value\": \"pisoFoam\"\n", - " },\n", - " {\n", - " \"key\": \"decomp\",\n", - " \"value\": \"On\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n", - "------------------------------------\n", - "Job submitted successfully. 
UUID: d491964f-8e39-499c-9307-1bfdd455cc73-007\n", - "Job UUID: d491964f-8e39-499c-9307-1bfdd455cc73-007\n" - ] - } - ], - "source": [ - "# Submit the job to TACC\n", - "submitted_job = ds.jobs.submit_request(job_dict)\n", - "print(f\"Job UUID: {submitted_job.uuid}\")" - ] + "outputs": [], + "source": "# Submit the job to TACC\nsubmitted_job = ds.jobs.submit(job_dict)\nprint(f\"Job UUID: {submitted_job.uuid}\")" }, { "cell_type": "code", @@ -714,4 +579,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file diff --git a/examples/openfoam/openfoam.ipynb b/examples/openfoam/openfoam.ipynb index 9838e2d..bcaf8bb 100644 --- a/examples/openfoam/openfoam.ipynb +++ b/examples/openfoam/openfoam.ipynb @@ -650,92 +650,10 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Generating job request for app 'openfoam-stampede3'...\n", - "Using App Details: openfoam-stampede3 v12.0.0\n", - "Auto-detected input parameter: 'Case Directory' (provided: 'Input Directory')\n", - "script_filename is None, skipping script parameter placement.\n", - "Adding/Updating TACC allocation: ASC25049\n", - "Job request dictionary generated successfully.\n", - "\n", - "--- Generated Job Request Dictionary ---\n", - "{\n", - " \"name\": \"OpenFOAM-DAPI-Demo\",\n", - " \"appId\": \"openfoam-stampede3\",\n", - " \"appVersion\": \"12.0.0\",\n", - " \"description\": \"OpenFOAM is free, open source software for computational fluid dynamics (CFD).\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": \"designsafe.storage.default\",\n", - " \"archiveSystemDir\": \"${EffectiveUserId}/tapis-jobs-archive/${JobCreateDate}/${JobUUID}\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 2,\n", - " \"maxMinutes\": 5,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": 
false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Case Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"envVariables\": [\n", - " {\n", - " \"key\": \"mesh\",\n", - " \"value\": \"On\"\n", - " },\n", - " {\n", - " \"key\": \"solver\",\n", - " \"value\": \"pisoFoam\"\n", - " },\n", - " {\n", - " \"key\": \"decomp\",\n", - " \"value\": \"On\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n", - "---------------------------------------\n" - ] - } - ], - "source": [ - "# Generate job request using DAPI\n", - "\n", - "job_dict = ds.jobs.generate_request(\n", - " app_id=app_id,\n", - " input_dir_uri=input_uri,\n", - " max_minutes=max_job_minutes,\n", - " allocation=tacc_allocation,\n", - " archive_system=archive_system,\n", - " job_name=job_name,\n", - " node_count=node_count, # Optional resource override\n", - " cores_per_node=cores_per_node, # Optional resource override\n", - " extra_env_vars=openfoam_env_vars,\n", - ")\n", - "\n", - "\n", - "print(\"\\n--- Generated Job Request Dictionary ---\")\n", - "print(json.dumps(job_dict, indent=2, default=str))\n", - "print(\"---------------------------------------\")" - ] + "outputs": [], + "source": "# Generate job request using DAPI\n\njob_dict = ds.jobs.generate(\n app_id=app_id,\n input_dir_uri=input_uri,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n archive_system=archive_system,\n job_name=job_name,\n node_count=node_count, # Optional resource override\n cores_per_node=cores_per_node, # Optional resource override\n extra_env_vars=openfoam_env_vars,\n)\n\n\nprint(\"\\n--- Generated Job Request Dictionary 
---\")\nprint(json.dumps(job_dict, indent=2, default=str))\nprint(\"---------------------------------------\")" }, { "cell_type": "markdown", @@ -746,71 +664,10 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "--- Submitting Tapis Job Request ---\n", - "{\n", - " \"name\": \"OpenFOAM-DAPI-Demo\",\n", - " \"appId\": \"openfoam-stampede3\",\n", - " \"appVersion\": \"12.0.0\",\n", - " \"description\": \"OpenFOAM is free, open source software for computational fluid dynamics (CFD).\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": \"designsafe.storage.default\",\n", - " \"archiveSystemDir\": \"${EffectiveUserId}/tapis-jobs-archive/${JobCreateDate}/${JobUUID}\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx-dev\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 2,\n", - " \"maxMinutes\": 5,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Case Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"envVariables\": [\n", - " {\n", - " \"key\": \"mesh\",\n", - " \"value\": \"On\"\n", - " },\n", - " {\n", - " \"key\": \"solver\",\n", - " \"value\": \"pisoFoam\"\n", - " },\n", - " {\n", - " \"key\": \"decomp\",\n", - " \"value\": \"On\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n", - "------------------------------------\n", - "Job submitted successfully. 
UUID: 314cac7f-bb28-4a56-a440-47374ccd11ce-007\n" - ] - } - ], - "source": [ - "# Submit the job\n", - "submitted_job = ds.jobs.submit_request(job_dict)" - ] + "outputs": [], + "source": "# Submit the job\nsubmitted_job = ds.jobs.submit(job_dict)" }, { "cell_type": "markdown", @@ -1175,4 +1032,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file diff --git a/examples/opensees/OpenSeesMP-dapi.ipynb b/examples/opensees/OpenSeesMP-dapi.ipynb index f8b4cbd..f7c388b 100644 --- a/examples/opensees/OpenSeesMP-dapi.ipynb +++ b/examples/opensees/OpenSeesMP-dapi.ipynb @@ -204,31 +204,8 @@ "execution_count": null, "id": "5286f5ac", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Generating job request for app 'opensees-mp-s3'...\n", - "Using App Details: opensees-mp-s3 vlatest\n", - "Placing script 'Main_multiMotion.tcl' in appArgs: 'Main Script'\n", - "Adding allocation: ASC25049\n", - "Job request dictionary generated successfully.\n" - ] - } - ], - "source": [ - "# Generate job request dictionary using app defaults\n", - "job_dict = ds.jobs.generate_request(\n", - " app_id=app_id,\n", - " input_dir_uri=input_uri,\n", - " script_filename=input_filename,\n", - " archive_system=archive_system,\n", - " max_minutes=max_job_minutes,\n", - " allocation=tacc_allocation,\n", - " queue=queue,\n", - ")" - ] + "outputs": [], + "source": "# Generate job request dictionary using app defaults\njob_dict = ds.jobs.generate(\n app_id=app_id,\n input_dir_uri=input_uri,\n script_filename=input_filename,\n archive_system=archive_system,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n queue=queue,\n)" }, { "cell_type": "code", @@ -303,70 +280,11 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "cell-12", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "--- Submitting Tapis Job Request ---\n", - "{\n", - " \"name\": 
\"opensees-MP-multiMotion-dapi\",\n", - " \"appId\": \"opensees-mp-s3\",\n", - " \"appVersion\": \"latest\",\n", - " \"description\": \"Runs all the processors in parallel. Requires understanding of parallel processing and the capabilities to write parallel scripts.\",\n", - " \"execSystemId\": \"stampede3\",\n", - " \"archiveSystemId\": \"designsafe.storage.default\",\n", - " \"archiveSystemDir\": \"${EffectiveUserId}/tapis-jobs-archive/${JobCreateDate}/${JobUUID}\",\n", - " \"archiveOnAppError\": true,\n", - " \"execSystemLogicalQueue\": \"skx\",\n", - " \"nodeCount\": 1,\n", - " \"coresPerNode\": 16,\n", - " \"maxMinutes\": 60,\n", - " \"memoryMB\": 192000,\n", - " \"isMpi\": false,\n", - " \"tags\": [],\n", - " \"fileInputs\": [\n", - " {\n", - " \"name\": \"Input Directory\",\n", - " \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/opensees/OpenSeesMP_multiMotion/DS_input\",\n", - " \"autoMountLocal\": true,\n", - " \"targetPath\": \"inputDirectory\"\n", - " }\n", - " ],\n", - " \"parameterSet\": {\n", - " \"appArgs\": [\n", - " {\n", - " \"name\": \"Main Script\",\n", - " \"arg\": \"Main_multiMotion.tcl\"\n", - " }\n", - " ],\n", - " \"schedulerOptions\": [\n", - " {\n", - " \"name\": \"TACC Allocation\",\n", - " \"arg\": \"-A ASC25049\"\n", - " }\n", - " ]\n", - " }\n", - "}\n", - "------------------------------------\n", - "Job submitted successfully. 
UUID: 414da416-817a-48db-a70a-0fa79b6bc348-007\n", - "Job launched with UUID: 414da416-817a-48db-a70a-0fa79b6bc348-007\n", - "Can also check in DesignSafe portal under - Workspace > Tools & Application > Job Status\n" - ] - } - ], - "source": [ - "# Submit job using dapi\n", - "submitted_job = ds.jobs.submit_request(job_dict)\n", - "print(f\"Job launched with UUID: {submitted_job.uuid}\")\n", - "print(\n", - " \"Can also check in DesignSafe portal under - Workspace > Tools & Application > Job Status\"\n", - ")" - ] + "outputs": [], + "source": "# Submit job using dapi\nsubmitted_job = ds.jobs.submit(job_dict)\nprint(f\"Job launched with UUID: {submitted_job.uuid}\")\nprint(\n \"Can also check in DesignSafe portal under - Workspace > Tools & Application > Job Status\"\n)" }, { "cell_type": "code", @@ -659,4 +577,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/examples/pylauncher_sweep.ipynb b/examples/pylauncher_sweep.ipynb new file mode 100644 index 0000000..8dc985c --- /dev/null +++ b/examples/pylauncher_sweep.ipynb @@ -0,0 +1,128 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# PyLauncher Parameter Sweeps with dapi\n", + "\n", + "This notebook demonstrates how to use dapi's parameter sweep utilities to generate\n", + "PyLauncher task lists and submit sweep jobs on DesignSafe.\n", + "\n", + "**PyLauncher** runs many independent serial tasks within a single SLURM allocation —\n", + "ideal for parameter studies, Monte Carlo simulations, and batch processing." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install --user dapi --quiet" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from dapi import DSClient\n", + "\n", + "ds = DSClient()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Part 1: Generic Demo — simulate.py with --alpha / --beta\n", + "\n", + "A simple example using the Agnostic App to sweep over two parameters." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Step 1: Define the parameter sweep" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sweep = {\n", + " \"ALPHA\": [0.3, 0.5, 3.7],\n", + " \"BETA\": [1.1, 2.0, 3.0],\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": "### Step 2: Preview all combinations (dry run)" + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": "df = ds.jobs.parametric_sweep.generate(\n 'python3 simulate.py --alpha ALPHA --beta BETA',\n sweep,\n preview=True,\n)\nprint(f\"Total runs: {len(df)}\")\ndf" + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": "### Step 3: Generate sweep files + Submit\n\nWrites `runsList.txt` and `call_pylauncher.py`, then submits the job.\nUncomment to actually run." 
+ }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": "import os\n\ninput_dir = os.path.expanduser(\"~/MyData/pylauncher_demo/\")\n\n# ds.jobs.parametric_sweep.generate(\n# 'python3 simulate.py --alpha ALPHA --beta BETA '\n# '--output \"$WORK/sweep_$SLURM_JOB_ID/run_ALPHA_BETA\"',\n# sweep,\n# input_dir,\n# debug=\"host+job\",\n# )\n#\n# job = ds.jobs.parametric_sweep.submit(\n# \"/MyData/pylauncher_demo/\",\n# app_id=\"agnostic\",\n# allocation=\"your_allocation\",\n# node_count=1,\n# cores_per_node=48,\n# max_minutes=30,\n# )\n# job.monitor()" + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## Part 2: OpenSees Demo — Cantilever Pushover with NodalMass Sweep\n", + "\n", + "A real-world example sweeping structural parameters for a cantilever pushover\n", + "analysis using `openseespy-s3`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": "sweep_opensees = {\n \"NODAL_MASS\": [4.19, 4.39, 4.59, 4.79, 4.99],\n \"LCOL\": [100, 200, 300],\n}\n\n# Preview\nds.jobs.parametric_sweep.generate(\n \"python3 cantilever.py --mass NODAL_MASS --lcol LCOL\",\n sweep_opensees,\n preview=True,\n)" + }, + { + "cell_type": "code", + "source": "# ds.jobs.parametric_sweep.generate(\n# \"python3 cantilever.py --mass NODAL_MASS --lcol LCOL \"\n# \"--outDir out_NODAL_MASS_LCOL\",\n# sweep_opensees,\n# os.path.expanduser(\"~/MyData/opensees_sweep/\"),\n# )\n#\n# job = ds.jobs.parametric_sweep.submit(\n# \"/MyData/opensees_sweep/\",\n# app_id=\"openseespy-s3\",\n# allocation=\"your_allocation\",\n# node_count=2,\n# cores_per_node=48,\n# max_minutes=60,\n# )\n# job.monitor()", + "metadata": {}, + "execution_count": null, + "outputs": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.0" + } + }, 
+ "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file diff --git a/myst.yml b/myst.yml index 2a8474c..1fd623c 100644 --- a/myst.yml +++ b/myst.yml @@ -29,6 +29,7 @@ project: - file: docs/examples/mpm.md - file: docs/examples/opensees.md - file: docs/examples/openfoam.md + - file: docs/examples/pylauncher.md - file: docs/examples/tms_credentials.md - file: docs/examples/database.md - title: API Reference @@ -36,6 +37,7 @@ project: - file: docs/api/index.md - file: docs/api/client.md - file: docs/api/jobs.md + - file: docs/api/launcher.md - file: docs/api/files.md - file: docs/api/apps.md - file: docs/api/systems.md diff --git a/tests/jobs/test_parametric_sweep.py b/tests/jobs/test_parametric_sweep.py new file mode 100644 index 0000000..a6d8b58 --- /dev/null +++ b/tests/jobs/test_parametric_sweep.py @@ -0,0 +1,150 @@ +import unittest +import tempfile +from pathlib import Path + +import pandas as pd + +from dapi.launcher import generate_sweep + + +class TestGenerate(unittest.TestCase): + """Tests for generate_sweep() writing mode.""" + + def test_empty_sweep_returns_base_command(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep("python run.py", {}, d) + self.assertEqual(cmds, ["python run.py"]) + + def test_single_param(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep("python run.py --alpha ALPHA", {"ALPHA": [1, 2, 3]}, d) + self.assertEqual(len(cmds), 3) + self.assertEqual(cmds[0], "python run.py --alpha 1") + self.assertEqual(cmds[2], "python run.py --alpha 3") + + def test_multi_param_cartesian_product(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep("python run.py --a A --b B", {"A": [1, 2], "B": [10, 20]}, d) + self.assertEqual(len(cmds), 4) + self.assertIn("python run.py --a 1 --b 10", cmds) + self.assertIn("python run.py --a 2 --b 20", cmds) + + def test_deterministic_order(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep("X Y", {"X": [1, 2], "Y": 
["a", "b"]}, d) + self.assertEqual(cmds, ["1 a", "1 b", "2 a", "2 b"]) + + def test_token_placeholder(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep("echo ALPHA", {"ALPHA": [1]}, d, placeholder_style="token") + self.assertEqual(cmds, ["echo 1"]) + + def test_braces_placeholder(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep("echo {ALPHA}", {"ALPHA": [1]}, d, placeholder_style="braces") + self.assertEqual(cmds, ["echo 1"]) + + def test_env_vars_preserved(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep('run --out "$WORK/$SLURM_JOB_ID" --a A', {"A": [1]}, d) + self.assertIn("$WORK", cmds[0]) + self.assertIn("$SLURM_JOB_ID", cmds[0]) + + def test_float_values(self): + with tempfile.TemporaryDirectory() as d: + cmds = generate_sweep("run --mass MASS", {"MASS": [4.19, 4.39]}, d) + self.assertEqual(cmds[0], "run --mass 4.19") + + def test_string_sweep_raises(self): + with tempfile.TemporaryDirectory() as d: + with self.assertRaises(TypeError): + generate_sweep("echo X", {"X": "bad"}, d) + + def test_empty_sequence_raises(self): + with tempfile.TemporaryDirectory() as d: + with self.assertRaises(ValueError): + generate_sweep("echo X", {"X": []}, d) + + def test_invalid_placeholder_raises(self): + with tempfile.TemporaryDirectory() as d: + with self.assertRaises(ValueError): + generate_sweep("echo X", {"X": [1]}, d, placeholder_style="bad") + + def test_missing_directory_raises(self): + with self.assertRaises(ValueError): + generate_sweep("echo X", {"X": [1]}) + + def test_writes_both_files(self): + with tempfile.TemporaryDirectory() as d: + generate_sweep("cmd --a A", {"A": [1, 2]}, d) + self.assertTrue((Path(d) / "runsList.txt").exists()) + self.assertTrue((Path(d) / "call_pylauncher.py").exists()) + + def test_tasklist_format(self): + with tempfile.TemporaryDirectory() as d: + generate_sweep("cmd --a A", {"A": [1, 2, 3]}, d) + content = (Path(d) / "runsList.txt").read_text(encoding="utf-8") + 
lines = content.strip().split("\n") + self.assertEqual(lines, ["cmd --a 1", "cmd --a 2", "cmd --a 3"]) + self.assertTrue(content.endswith("\n")) + + def test_creates_parent_dirs(self): + with tempfile.TemporaryDirectory() as d: + deep = Path(d) / "deep" / "nested" + generate_sweep("cmd", {}, deep) + self.assertTrue((deep / "runsList.txt").exists()) + + def test_launcher_script_no_debug(self): + with tempfile.TemporaryDirectory() as d: + generate_sweep("cmd", {}, d) + content = (Path(d) / "call_pylauncher.py").read_text(encoding="utf-8") + self.assertIn("import pylauncher", content) + self.assertIn('ClassicLauncher("runsList.txt")', content) + self.assertNotIn("debug", content) + + def test_launcher_script_with_debug(self): + with tempfile.TemporaryDirectory() as d: + generate_sweep("cmd", {}, d, debug="host+job") + content = (Path(d) / "call_pylauncher.py").read_text(encoding="utf-8") + self.assertIn('debug="host+job"', content) + + +class TestPreview(unittest.TestCase): + """Tests for generate_sweep(preview=True).""" + + def test_returns_dataframe(self): + df = generate_sweep("cmd", {"A": [1, 2], "B": [10, 20]}, preview=True) + self.assertIsInstance(df, pd.DataFrame) + self.assertEqual(df.shape, (4, 2)) + + def test_empty_sweep(self): + df = generate_sweep("cmd", {}, preview=True) + self.assertIsInstance(df, pd.DataFrame) + self.assertEqual(len(df), 0) + + def test_column_order(self): + df = generate_sweep("cmd", {"BETA": [1], "ALPHA": [2]}, preview=True) + self.assertEqual(list(df.columns), ["BETA", "ALPHA"]) + + def test_values(self): + df = generate_sweep("cmd", {"X": [1, 2], "Y": [10, 20]}, preview=True) + self.assertEqual(df.iloc[0]["X"], 1) + self.assertEqual(df.iloc[0]["Y"], 10) + + def test_no_files_written(self): + with tempfile.TemporaryDirectory() as d: + generate_sweep("cmd", {"A": [1]}, d, preview=True) + self.assertFalse((Path(d) / "runsList.txt").exists()) + + def test_directory_not_required(self): + # Should not raise even without directory + df = 
generate_sweep("cmd", {"A": [1]}, preview=True) + self.assertEqual(len(df), 1) + + def test_validation_still_applies(self): + with self.assertRaises(TypeError): + generate_sweep("cmd", {"X": "bad"}, preview=True) + + +if __name__ == "__main__": + unittest.main() From ea399b80904d423a89452f63b568b7d452ea71d9 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 14:46:30 -0500 Subject: [PATCH 10/21] Rename commands for brevity --- dapi/client.py | 41 +++--- dapi/exceptions.py | 2 +- dapi/launcher.py | 16 +-- docs/examples/mpm.md | 10 +- docs/examples/openfoam.md | 10 +- docs/examples/opensees.md | 12 +- docs/jobs.md | 6 +- docs/quickstart.md | 6 +- examples/mpm/mpm-minimal.ipynb | 37 +---- examples/mpm/mpm.ipynb | 176 ++--------------------- examples/openfoam/openfoam-minimal.ipynb | 55 ++----- examples/openfoam/openfoam.ipynb | 38 +---- examples/opensees/OpenSeesMP-dapi.ipynb | 37 +---- 13 files changed, 87 insertions(+), 359 deletions(-) diff --git a/dapi/client.py b/dapi/client.py index 953e2aa..adf760c 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -169,11 +169,9 @@ def __init__(self, tapis_client: Tapis): """ self._tapis = tapis_client - def translate_path_to_uri(self, *args, **kwargs) -> str: + def to_uri(self, *args, **kwargs) -> str: """Translate DesignSafe-style paths to Tapis URIs. - This is a convenience wrapper around files_module.get_ds_path_uri(). - Args: *args: Positional arguments passed to get_ds_path_uri(). **kwargs: Keyword arguments passed to get_ds_path_uri(). @@ -187,11 +185,9 @@ def translate_path_to_uri(self, *args, **kwargs) -> str: """ return files_module.get_ds_path_uri(self._tapis, *args, **kwargs) - def translate_uri_to_path(self, *args, **kwargs) -> str: + def to_path(self, *args, **kwargs) -> str: """Translate Tapis URIs to DesignSafe local paths. - This is a convenience wrapper around files_module.tapis_uri_to_local_path(). - Args: *args: Positional arguments passed to tapis_uri_to_local_path(). 
**kwargs: Keyword arguments passed to tapis_uri_to_local_path(). @@ -200,7 +196,7 @@ def translate_uri_to_path(self, *args, **kwargs) -> str: str: The corresponding DesignSafe local path (e.g., /home/jupyter/MyData/path). Example: - >>> local_path = ds.files.translate_uri_to_path("tapis://designsafe.storage.default/user/data") + >>> local_path = ds.files.to_path("tapis://designsafe.storage.default/user/data") >>> print(local_path) # "/home/jupyter/MyData/data" """ return files_module.tapis_uri_to_local_path(*args, **kwargs) @@ -270,7 +266,7 @@ def __init__(self, tapis_client: Tapis): """ self._tapis = tapis_client - def list_queues(self, system_id: str, verbose: bool = True) -> List[Any]: + def queues(self, system_id: str, verbose: bool = True) -> List[Any]: """List logical queues available on a Tapis execution system. This is a convenience wrapper around systems_module.list_system_queues(). @@ -379,7 +375,7 @@ def __init__(self, tapis_client): def generate( self, - base_command: str, + command: str, sweep: Dict[str, Any], directory: str = None, *, @@ -392,12 +388,12 @@ def generate( With ``preview=True``, returns a DataFrame of all parameter combinations — no files are written. - Otherwise, expands *base_command* into one command per combination + Otherwise, expands *command* into one command per combination and writes ``runsList.txt`` and ``call_pylauncher.py`` into *directory*. Returns the list of generated commands. Args: - base_command: Command template with placeholders matching sweep keys. + command: Command template with placeholders matching sweep keys. sweep: Mapping of placeholder name to sequence of values. directory: Directory to write files into (created if needed). Required when *preview* is ``False``. @@ -411,7 +407,7 @@ def generate( *preview* is ``True``. 
""" return launcher_module.generate_sweep( - base_command, sweep, directory, + command, sweep, directory, placeholder_style=placeholder_style, debug=debug, preview=preview, ) @@ -617,23 +613,22 @@ def submit(self, job_request: Dict[str, Any]) -> SubmittedJob: """ return jobs_module.submit_job_request(self._tapis, job_request) - # --- Management methods remain the same --- - def get(self, job_uuid: str) -> SubmittedJob: - """Get a SubmittedJob object for managing an existing job by UUID. + def job(self, job_uuid: str) -> SubmittedJob: + """Get a SubmittedJob object for an existing job by UUID. Args: job_uuid (str): The UUID of an existing Tapis job. Returns: - SubmittedJob: A SubmittedJob object for monitoring and managing the job. + SubmittedJob: A job object for monitoring via ``.monitor()``. Example: - >>> job = ds.jobs.get("12345678-1234-1234-1234-123456789abc") - >>> status = job.status + >>> job = ds.jobs.job("12345678-1234-1234-1234-123456789abc") + >>> job.monitor() """ return SubmittedJob(self._tapis, job_uuid) - def get_status(self, job_uuid: str) -> str: + def status(self, job_uuid: str) -> str: """Get the current status of a job by UUID. Args: @@ -646,12 +641,12 @@ def get_status(self, job_uuid: str) -> str: JobMonitorError: If status retrieval fails. Example: - >>> status = ds.jobs.get_status("12345678-1234-1234-1234-123456789abc") - >>> print(f"Job status: {status}") + >>> ds.jobs.status("12345678-1234-1234-1234-123456789abc") + 'FINISHED' """ return jobs_module.get_job_status(self._tapis, job_uuid) - def get_runtime_summary(self, job_uuid: str, verbose: bool = False): + def runtime_summary(self, job_uuid: str, verbose: bool = False): """Print the runtime summary for a job by UUID. Args: @@ -660,7 +655,7 @@ def get_runtime_summary(self, job_uuid: str, verbose: bool = False): Defaults to False. 
Example: - >>> ds.jobs.get_runtime_summary("12345678-1234-1234-1234-123456789abc") + >>> ds.jobs.runtime_summary("12345678-1234-1234-1234-123456789abc") Runtime Summary --------------- QUEUED time: 00:05:30 diff --git a/dapi/exceptions.py b/dapi/exceptions.py index 28446c6..2ef9609 100644 --- a/dapi/exceptions.py +++ b/dapi/exceptions.py @@ -109,7 +109,7 @@ class SystemInfoError(DapiException): Example: >>> try: - ... queues = client.systems.list_queues("nonexistent-system") + ... queues = client.systems.queues("nonexistent-system") ... except SystemInfoError as e: ... print(f"System info retrieval failed: {e}") """ diff --git a/dapi/launcher.py b/dapi/launcher.py index 8ffe175..addc989 100644 --- a/dapi/launcher.py +++ b/dapi/launcher.py @@ -27,13 +27,13 @@ def _validate_sweep(sweep: Mapping[str, Sequence[Any]]) -> None: def _expand_commands( - base_command: str, + command: str, sweep: Mapping[str, Sequence[Any]], placeholder_style: str, ) -> List[str]: """Expand a command template into all parameter combinations.""" if not sweep: - return [base_command] + return [command] _validate_sweep(sweep) @@ -43,7 +43,7 @@ def _expand_commands( keys = list(sweep.keys()) commands: List[str] = [] for combo in product(*[sweep[k] for k in keys]): - cmd = base_command + cmd = command for k, v in zip(keys, combo): if placeholder_style == "token": cmd = cmd.replace(k, str(v)) @@ -55,7 +55,7 @@ def _expand_commands( def generate_sweep( - base_command: str, + command: str, sweep: Mapping[str, Sequence[Any]], directory: Union[str, Path, None] = None, *, @@ -69,19 +69,19 @@ def generate_sweep( combinations without writing any files — useful for inspecting the sweep in a notebook before committing. - When *preview* is ``False`` (default), expands *base_command* into one + When *preview* is ``False`` (default), expands *command* into one command per parameter combination and writes ``runsList.txt`` and ``call_pylauncher.py`` into *directory*. 
Args: - base_command: Command template containing placeholders that match + command: Command template containing placeholders that match keys in *sweep*. Environment variables like ``$WORK`` or ``$SLURM_JOB_ID`` are left untouched. sweep: Mapping of placeholder name to a sequence of values. Example: ``{"ALPHA": [0.3, 0.5], "BETA": [1, 2]}``. directory: Directory to write files into. Created if it doesn't exist. Required when *preview* is ``False``. - placeholder_style: How placeholders appear in *base_command*: + placeholder_style: How placeholders appear in *command*: - ``"token"`` (default): bare tokens, e.g. ``ALPHA`` - ``"braces"``: brace-wrapped, e.g. ``{ALPHA}`` @@ -113,7 +113,7 @@ def generate_sweep( if directory is None: raise ValueError("directory is required when preview=False.") - commands = _expand_commands(base_command, sweep, placeholder_style) + commands = _expand_commands(command, sweep, placeholder_style) dirpath = Path(directory) dirpath.mkdir(parents=True, exist_ok=True) diff --git a/docs/examples/mpm.md b/docs/examples/mpm.md index 46ac409..c2f218c 100644 --- a/docs/examples/mpm.md +++ b/docs/examples/mpm.md @@ -88,7 +88,7 @@ mpm_config = { ```python # Convert DesignSafe path to Tapis URI format -input_uri = ds.files.translate_path_to_uri(ds_path) +input_uri = ds.files.to_uri(ds_path) print(f"Input Directory Tapis URI: {input_uri}") ``` @@ -232,7 +232,7 @@ ds.jobs.interpret_status(final_status, submitted_job.uuid) submitted_job.print_runtime_summary(verbose=False) # Get current job status -current_status = ds.jobs.get_status(submitted_job.uuid) +current_status = ds.jobs.status(submitted_job.uuid) print(f"Current status: {current_status}") # Display last status message from TACC @@ -243,7 +243,7 @@ print(f"Last message: {submitted_job.last_message}") - **`interpret_status`**: Provides human-readable explanation of job outcome - **`print_runtime_summary`**: Shows time spent in each job phase (queued, running, etc.) 
-- **`get_status`**: Gets current job status (useful for checking later) +- **`status`**: Gets current job status (useful for checking later) - **`last_message`**: Shows last status message from the job scheduler ### Step 10: View Job Output @@ -307,7 +307,7 @@ typical_outputs = { ```python # Convert archive URI to local path for analysis -archive_path = ds.files.translate_uri_to_path(archive_uri) +archive_path = ds.files.to_path(archive_uri) print(f"Archive path: {archive_path}") # Import analysis libraries @@ -333,7 +333,7 @@ else: **What this does:** -- **`translate_uri_to_path`**: Converts Tapis URI to local file system path +- **`to_path`**: Converts Tapis URI to local file system path - **`os.listdir`**: Lists files in the results directory - **`.vtu files`**: VTK unstructured grid files for visualization - **ParaView**: Recommended tool for visualizing MPM particle data \ No newline at end of file diff --git a/docs/examples/openfoam.md b/docs/examples/openfoam.md index 7961f75..603a642 100644 --- a/docs/examples/openfoam.md +++ b/docs/examples/openfoam.md @@ -90,7 +90,7 @@ solvers = { ```python # Convert DesignSafe path to Tapis URI format -input_uri = ds.files.translate_path_to_uri(ds_path) +input_uri = ds.files.to_uri(ds_path) print(f"Input Directory Tapis URI: {input_uri}") ``` @@ -239,7 +239,7 @@ ds.jobs.interpret_status(final_status, submitted_job.uuid) submitted_job.print_runtime_summary(verbose=False) # Get current job status -current_status = ds.jobs.get_status(submitted_job.uuid) +current_status = ds.jobs.status(submitted_job.uuid) print(f"Current status: {current_status}") # Display last status message from TACC @@ -250,7 +250,7 @@ print(f"Last message: {submitted_job.last_message}") - **`interpret_status`**: Provides human-readable explanation of job outcome - **`print_runtime_summary`**: Shows time spent in each job phase (queued, running, etc.) 
-- **`get_status`**: Gets current job status (useful for checking later) +- **`status`**: Gets current job status (useful for checking later) - **`last_message`**: Shows last status message from the job scheduler ### Step 10: View Job Output @@ -302,7 +302,7 @@ typical_outputs = { ```python # Convert archive URI to local path for analysis -archive_path = ds.files.translate_uri_to_path(archive_uri) +archive_path = ds.files.to_path(archive_uri) print(f"Archive path: {archive_path}") # Import plotting libraries @@ -322,7 +322,7 @@ print(f"Loaded force coefficients data with shape: {data.shape}") **What this does:** -- **`translate_uri_to_path`**: Converts Tapis URI to local file system path +- **`to_path`**: Converts Tapis URI to local file system path - **`pandas.read_csv`**: Reads force coefficient data (much cleaner than manual parsing) - **`skiprows=9`**: Skips OpenFOAM header lines - **`sep='\t'`**: Uses tab separator (OpenFOAM default) diff --git a/docs/examples/opensees.md b/docs/examples/opensees.md index 6bbf7f3..727c7b2 100644 --- a/docs/examples/opensees.md +++ b/docs/examples/opensees.md @@ -80,7 +80,7 @@ control_corespernode: int = 16 # Cores per node for parallel analysis ```python # Convert DesignSafe path to Tapis URI format -input_uri = ds.files.translate_path_to_uri(ds_path) +input_uri = ds.files.to_uri(ds_path) print(f"Input Directory Tapis URI: {input_uri}") ``` @@ -225,7 +225,7 @@ ds.jobs.interpret_status(final_status, submitted_job.uuid) submitted_job.print_runtime_summary(verbose=False) # Get current job status -current_status = ds.jobs.get_status(submitted_job.uuid) +current_status = ds.jobs.status(submitted_job.uuid) print(f"Current status: {current_status}") # Display last status message from TACC @@ -236,7 +236,7 @@ print(f"Last message: {submitted_job.last_message}") - **`interpret_status`**: Provides human-readable explanation of job outcome - **`print_runtime_summary`**: Shows time spent in each job phase (queued, running, etc.) 
-- **`get_status`**: Gets current job status (useful for checking later) +- **`status`**: Gets current job status (useful for checking later) - **`last_message`**: Shows last status message from the job scheduler ### Step 10: Access Job Archive and Results @@ -247,7 +247,7 @@ archive_uri = submitted_job.archive_uri print(f"Archive URI: {archive_uri}") # Translate archive URI to local DesignSafe path -local_archive_path = ds.files.translate_uri_to_path(archive_uri) +local_archive_path = ds.files.to_path(archive_uri) print(f"Local archive path: {local_archive_path}") # List archive contents @@ -260,7 +260,7 @@ for item in archive_files: **What this does:** - **`archive_uri`**: Location where job results are stored -- **`translate_uri_to_path`**: Converts Tapis URI to local path for analysis +- **`to_path`**: Converts Tapis URI to local path for analysis - **`ds.files.list`**: Lists all files and directories in the archive - Shows output files like analysis results, output data, and logs @@ -290,7 +290,7 @@ try: print(f"- {item.name} ({item.type})") # Change to the archive directory for post-processing - archive_path = ds.files.translate_uri_to_path(input_dir_archive_uri) + archive_path = ds.files.to_path(input_dir_archive_uri) os.chdir(archive_path) print(f"\nChanged to directory: {archive_path}") diff --git a/docs/jobs.md b/docs/jobs.md index c63662a..277dc8c 100644 --- a/docs/jobs.md +++ b/docs/jobs.md @@ -108,7 +108,7 @@ print(f"Default Cores: {app_details.jobAttributes.coresPerNode}") ```python # 1. Prepare input directory input_path = "/MyData/analysis/input/" -input_uri = ds.files.translate_path_to_uri(input_path, verify_exists=True) +input_uri = ds.files.to_uri(input_path, verify_exists=True) # 2. 
Generate job request job_request = ds.jobs.generate( @@ -403,7 +403,7 @@ for job in jobs: ```python # List available queues for a system -frontera_queues = ds.systems.list_queues("frontera") +frontera_queues = ds.systems.queues("frontera") for queue in frontera_queues: print(f"Queue: {queue.name}") print(f"Max runtime: {queue.maxRequestedTime} minutes") @@ -419,7 +419,7 @@ print(f"Development queue available: {dev_queue_exists}") ```python # Get system information try: - queues = ds.systems.list_queues("stampede3") + queues = ds.systems.queues("stampede3") print(f"Stampede3 has {len(queues)} available queues") except Exception as e: print(f"Cannot access Stampede3: {e}") diff --git a/docs/quickstart.md b/docs/quickstart.md index 71cdef3..b2dfb3d 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -90,7 +90,7 @@ app_details = ds.apps.get_details("mpm-s3", verbose=True) ```python # Translate DesignSafe paths to TAPIS URIs input_path = "/MyData/mpm-benchmarks/2d/uniaxial_stress/" -input_uri = ds.files.translate_path_to_uri(input_path, verify_exists=True) +input_uri = ds.files.to_uri(input_path, verify_exists=True) print(f"Input URI: {input_uri}") # List files in the directory @@ -316,13 +316,13 @@ except Exception as e: ### 1. Always Verify Paths ```python # Good - verify path exists -input_uri = ds.files.translate_path_to_uri( +input_uri = ds.files.to_uri( "/MyData/analysis/", verify_exists=True ) # Risk - path might not exist -input_uri = ds.files.translate_path_to_uri("/MyData/analysis/") +input_uri = ds.files.to_uri("/MyData/analysis/") ``` ### 2. 
Use Descriptive Job Names diff --git a/examples/mpm/mpm-minimal.ipynb b/examples/mpm/mpm-minimal.ipynb index d691233..46f4c15 100644 --- a/examples/mpm/mpm-minimal.ipynb +++ b/examples/mpm/mpm-minimal.ipynb @@ -133,24 +133,11 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "3f0ee687", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Translated '/MyData/mpm-benchmarks/2d/uniaxial_stress/' to 'tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/' using t.username\n", - "Input Directory Tapis URI: tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\n" - ] - } - ], - "source": [ - "# Convert DesignSafe path to Tapis URI format\n", - "input_uri = ds.files.translate_path_to_uri(ds_path)\n", - "print(f\"Input Directory Tapis URI: {input_uri}\")" - ] + "outputs": [], + "source": "# Convert DesignSafe path to Tapis URI format\ninput_uri = ds.files.to_uri(ds_path)\nprint(f\"Input Directory Tapis URI: {input_uri}\")" }, { "cell_type": "code", @@ -344,23 +331,11 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "id": "6437373b", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Current status: FINISHED\n" - ] - } - ], - "source": [ - "# Get current job status\n", - "current_status = ds.jobs.get_status(submitted_job.uuid)\n", - "print(f\"Current status: {current_status}\")" - ] + "outputs": [], + "source": "# Get current job status\ncurrent_status = ds.jobs.status(submitted_job.uuid)\nprint(f\"Current status: {current_status}\")" }, { "cell_type": "code", diff --git a/examples/mpm/mpm.ipynb b/examples/mpm/mpm.ipynb index c1c9dd3..08cd0c2 100644 --- a/examples/mpm/mpm.ipynb +++ b/examples/mpm/mpm.ipynb @@ -189,27 +189,11 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "3f0ee687", "metadata": {}, - "outputs": [ - { - "name": "stdout", 
- "output_type": "stream", - "text": [ - "Translated '/MyData/mpm-benchmarks/2d/uniaxial_stress/' to 'tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/' using t.username\n", - "Input Directory Tapis URI: tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\n" - ] - } - ], - "source": [ - "try:\n", - " input_uri = ds.files.translate_path_to_uri(ds_path)\n", - " print(f\"Input Directory Tapis URI: {input_uri}\")\n", - "except Exception as e:\n", - " print(f\"Error translating path '{ds_path}': {e}\")\n", - " raise SystemExit(\"Stopping notebook due to path translation error.\")" - ] + "outputs": [], + "source": "try:\n input_uri = ds.files.to_uri(ds_path)\n print(f\"Input Directory Tapis URI: {input_uri}\")\nexcept Exception as e:\n print(f\"Error translating path '{ds_path}': {e}\")\n raise SystemExit(\"Stopping notebook due to path translation error.\")" }, { "cell_type": "code", @@ -426,38 +410,11 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "id": "6437373b", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Fetching status for job 52f48eaf-b7d6-4964-a97b-a4b32a6aaeb3-007 using ds.jobs.get_status()...\n", - "Status of job 52f48eaf-b7d6-4964-a97b-a4b32a6aaeb3-007: FINISHED\n" - ] - } - ], - "source": [ - "if \"ds\" in locals() and \"submitted_job\" in locals(): # Check if ds and a job exist\n", - " job_uuid_to_check = submitted_job.uuid # Or any other job UUID string\n", - " try:\n", - " print(\n", - " f\"\\nFetching status for job {job_uuid_to_check} using ds.jobs.get_status()...\"\n", - " )\n", - " current_status = ds.jobs.get_status(job_uuid_to_check)\n", - " print(f\"Status of job {job_uuid_to_check}: {current_status}\")\n", - " except JobMonitorError as e:\n", - " print(f\"Error getting job status: {e}\")\n", - " except Exception as e:\n", - " print(f\"An unexpected error occurred: {e}\")\n", - "else:\n", - " print(\n", - 
" \"DSClient ('ds') or submitted_job not initialized. Cannot demonstrate ds.jobs.get_status().\"\n", - " )" - ] + "outputs": [], + "source": "if \"ds\" in locals() and \"submitted_job\" in locals(): # Check if ds and a job exist\n job_uuid_to_check = submitted_job.uuid # Or any other job UUID string\n try:\n print(\n f\"\\nFetching status for job {job_uuid_to_check} using ds.jobs.status()...\"\n )\n current_status = ds.jobs.status(job_uuid_to_check)\n print(f\"Status of job {job_uuid_to_check}: {current_status}\")\n except JobMonitorError as e:\n print(f\"Error getting job status: {e}\")\n except Exception as e:\n print(f\"An unexpected error occurred: {e}\")\nelse:\n print(\n \"DSClient ('ds') or submitted_job not initialized. Cannot demonstrate ds.jobs.status().\"\n )" }, { "cell_type": "code", @@ -848,57 +805,11 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "id": "9aaef98a", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "--- System Queue Information ---\n", - "\n", - "Fetching queue information for system 'frontera'...\n", - "Found 10 batch logical queues for system 'frontera':\n", - " - Name: flex (HPC Queue: flex, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 2880, Max Nodes: 128)\n", - " - Name: development (HPC Queue: development, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 120, Max Nodes: 40)\n", - " - Name: normal (HPC Queue: normal, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 2880, Max Nodes: 512)\n", - " - Name: large (HPC Queue: large, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 2880, Max Nodes: 2048)\n", - " - Name: debug (HPC Queue: debug, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 2880, Max Nodes: 8368)\n", - " - Name: rtx (HPC Queue: rtx, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 2880, Max Nodes: 22)\n", - " - Name: rtx-dev (HPC Queue: rtx-dev, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 120, Max Nodes: 2)\n", - " - Name: nvdimm (HPC Queue: nvdimm, Max 
Jobs: -1, Max User Jobs: N/A, Max Mins: 120, Max Nodes: 4)\n", - " - Name: small (HPC Queue: small, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 2880, Max Nodes: 2)\n", - " - Name: grace (HPC Queue: grace, Max Jobs: -1, Max User Jobs: N/A, Max Mins: 7200, Max Nodes: 30)\n", - "\n", - "Does 'development' queue exist on Frontera? True\n", - "\n", - "Fetching queue information for system 'non-existent-system'...\n", - "Error getting system info: Failed to retrieve queues for system 'non-existent-system': message: SYSAPI_NOT_FOUND Record not found. jwtTenant: designsafe jwtUser: kks32 OboTenant: designsafe OboUser: kks32 System: non-existent-system\n", - "-----------------------------\n" - ] - } - ], - "source": [ - "# --- Example: List Queues for Frontera ---\n", - "try:\n", - " print(\"\\n--- System Queue Information ---\")\n", - " frontera_queues = ds.systems.list_queues(\"frontera\")\n", - " # You can now inspect the 'frontera_queues' list\n", - " # Example: Find if 'development' queue exists\n", - " dev_queue_exists = any(q.name == \"development\" for q in frontera_queues)\n", - " print(f\"Does 'development' queue exist on Frontera? {dev_queue_exists}\")\n", - "\n", - " # Example: List queues for a non-existent system\n", - " ds.systems.list_queues(\"non-existent-system\") # This would raise SystemInfoError\n", - "\n", - "except SystemInfoError as e:\n", - " print(f\"Error getting system info: {e}\")\n", - "except Exception as e:\n", - " print(f\"An unexpected error occurred: {e}\")\n", - "print(\"-----------------------------\")" - ] + "outputs": [], + "source": "# --- Example: List Queues for Frontera ---\ntry:\n print(\"\\n--- System Queue Information ---\")\n frontera_queues = ds.systems.queues(\"frontera\")\n # You can now inspect the 'frontera_queues' list\n # Example: Find if 'development' queue exists\n dev_queue_exists = any(q.name == \"development\" for q in frontera_queues)\n print(f\"Does 'development' queue exist on Frontera? 
{dev_queue_exists}\")\n\n # Example: List queues for a non-existent system\n ds.systems.queues(\"non-existent-system\") # This would raise SystemInfoError\n\nexcept SystemInfoError as e:\n print(f\"Error getting system info: {e}\")\nexcept Exception as e:\n print(f\"An unexpected error occurred: {e}\")\nprint(\"-----------------------------\")" }, { "cell_type": "markdown", @@ -910,74 +821,11 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "id": "e074a3c3", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Translating and verifying path: /MyData/mpm-benchmarks/2d/uniaxial_stress/\n", - "Translated '/MyData/mpm-benchmarks/2d/uniaxial_stress/' to 'tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/' using t.username\n", - "Verifying existence of translated path: tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\n", - "Checking system 'designsafe.storage.default' for path 'kks32/mpm-benchmarks/2d/uniaxial_stress/'...\n", - "Verification successful: Path exists.\n", - "Input Directory Tapis URI (verified): tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\n", - "\n", - "Translating and verifying non-existent path: /MyData/this/path/does/not/exist/\n", - "Translated '/MyData/this/path/does/not/exist/' to 'tapis://designsafe.storage.default/kks32/this/path/does/not/exist/' using t.username\n", - "Verifying existence of translated path: tapis://designsafe.storage.default/kks32/this/path/does/not/exist/\n", - "Checking system 'designsafe.storage.default' for path 'kks32/this/path/does/not/exist/'...\n", - "Error during path translation/verification: Verification error for path 'kks32/this/path/does/not/exist/' on system 'designsafe.storage.default': message: FILES_CLIENT_SSH_NOT_FOUND Path not found. 
OboTenant: designsafe OboUser: kks32 System: designsafe.storage.default EffectiveUser: kks32 Host: cloud.data.tacc.utexas.edu RootDir: /data/designsafe/mydata Path: kks32/this/path/does/not/exist\n" - ] - }, - { - "ename": "SystemExit", - "evalue": "Stopping notebook due to path verification error.", - "output_type": "error", - "traceback": [ - "An exception has occurred, use %tb to see the full traceback.\n", - "\u001b[0;31mSystemExit\u001b[0m\u001b[0;31m:\u001b[0m Stopping notebook due to path verification error.\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/krishna/Library/Caches/pypoetry/virtualenvs/dapi-ptztLUqK-py3.13/lib/python3.13/site-packages/IPython/core/interactiveshell.py:3585: UserWarning: To exit: use 'exit', 'quit', or Ctrl-D.\n", - " warn(\"To exit: use 'exit', 'quit', or Ctrl-D.\", stacklevel=1)\n" - ] - } - ], - "source": [ - "# --- Translate Path with Verification ---\n", - "ds_path: str = \"/MyData/mpm-benchmarks/2d/uniaxial_stress/\"\n", - "ds_path_nonexistent: str = \"/MyData/this/path/does/not/exist/\"\n", - "\n", - "try:\n", - " # Translate and verify the existing path\n", - " print(f\"\\nTranslating and verifying path: {ds_path}\")\n", - " input_uri = ds.files.translate_path_to_uri(ds_path, verify_exists=True)\n", - " print(f\"Input Directory Tapis URI (verified): {input_uri}\")\n", - "\n", - " # Example: Try translating a non-existent path with verification (will raise error)\n", - " print(f\"\\nTranslating and verifying non-existent path: {ds_path_nonexistent}\")\n", - " input_uri_bad = ds.files.translate_path_to_uri(\n", - " ds_path_nonexistent, verify_exists=True\n", - " )\n", - " print(f\"This line should not be reached.\")\n", - "\n", - "except FileOperationError as e:\n", - " print(f\"Error during path translation/verification: {e}\")\n", - " # Decide how to handle the error (e.g., stop notebook, use default, etc.)\n", - " # For this example, we'll stop if verification fails.\n", - " raise 
SystemExit(\"Stopping notebook due to path verification error.\")\n", - "except Exception as e:\n", - " print(f\"An unexpected error occurred during path translation: {e}\")\n", - " raise SystemExit(\"Stopping notebook due to unexpected path translation error.\")" - ] + "outputs": [], + "source": "# --- Translate Path with Verification ---\nds_path: str = \"/MyData/mpm-benchmarks/2d/uniaxial_stress/\"\nds_path_nonexistent: str = \"/MyData/this/path/does/not/exist/\"\n\ntry:\n # Translate and verify the existing path\n print(f\"\\nTranslating and verifying path: {ds_path}\")\n input_uri = ds.files.to_uri(ds_path, verify_exists=True)\n print(f\"Input Directory Tapis URI (verified): {input_uri}\")\n\n # Example: Try translating a non-existent path with verification (will raise error)\n print(f\"\\nTranslating and verifying non-existent path: {ds_path_nonexistent}\")\n input_uri_bad = ds.files.to_uri(\n ds_path_nonexistent, verify_exists=True\n )\n print(f\"This line should not be reached.\")\n\nexcept FileOperationError as e:\n print(f\"Error during path translation/verification: {e}\")\n # Decide how to handle the error (e.g., stop notebook, use default, etc.)\n # For this example, we'll stop if verification fails.\n raise SystemExit(\"Stopping notebook due to path verification error.\")\nexcept Exception as e:\n print(f\"An unexpected error occurred during path translation: {e}\")\n raise SystemExit(\"Stopping notebook due to unexpected path translation error.\")" } ], "metadata": { diff --git a/examples/openfoam/openfoam-minimal.ipynb b/examples/openfoam/openfoam-minimal.ipynb index bc59e21..8f21436 100644 --- a/examples/openfoam/openfoam-minimal.ipynb +++ b/examples/openfoam/openfoam-minimal.ipynb @@ -112,23 +112,10 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Translated '/MyData/template-notebooks/tapis3/OpenFOAM/DH1_run' to 
'tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run' using t.username\n", - "Input Directory Tapis URI: tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run\n" - ] - } - ], - "source": [ - "# Convert DesignSafe path to Tapis URI format\n", - "input_uri = ds.files.translate_path_to_uri(ds_path)\n", - "print(f\"Input Directory Tapis URI: {input_uri}\")" - ] + "outputs": [], + "source": "# Convert DesignSafe path to Tapis URI format\ninput_uri = ds.files.to_uri(ds_path)\nprint(f\"Input Directory Tapis URI: {input_uri}\")" }, { "cell_type": "code", @@ -294,22 +281,10 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Current status: FINISHED\n" - ] - } - ], - "source": [ - "# Get current job status\n", - "current_status = ds.jobs.get_status(submitted_job.uuid)\n", - "print(f\"Current status: {current_status}\")" - ] + "outputs": [], + "source": "# Get current job status\ncurrent_status = ds.jobs.status(submitted_job.uuid)\nprint(f\"Current status: {current_status}\")" }, { "cell_type": "code", @@ -444,22 +419,10 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/jupyter/MyData/tapis-jobs-archive/2025-06-09Z/d491964f-8e39-499c-9307-1bfdd455cc73-007\n" - ] - } - ], - "source": [ - "# Get DesignSafe Jupyter path\n", - "archive_path = ds.files.translate_uri_to_path(archive_uri)\n", - "print(archive_path)" - ] + "outputs": [], + "source": "# Get DesignSafe Jupyter path\narchive_path = ds.files.to_path(archive_uri)\nprint(archive_path)" }, { "cell_type": "code", diff --git a/examples/openfoam/openfoam.ipynb b/examples/openfoam/openfoam.ipynb index bcaf8bb..f6ef83c 100644 --- a/examples/openfoam/openfoam.ipynb +++ b/examples/openfoam/openfoam.ipynb @@ 
-620,26 +620,12 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": null, "metadata": { "scrolled": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Translated '/home/jupyter/MyData/template-notebooks/tapis3/OpenFOAM/DH1_run' to 'tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run' using t.username\n", - "Verifying existence of translated path: tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/OpenFOAM/DH1_run\n", - "Checking system 'designsafe.storage.default' for path 'kks32/template-notebooks/tapis3/OpenFOAM/DH1_run'...\n", - "Verification successful: Path exists.\n" - ] - } - ], - "source": [ - "# Translate local path to Tapis URI\n", - "input_uri = ds.files.translate_path_to_uri(ds_path, verify_exists=True)" - ] + "outputs": [], + "source": "# Translate local path to Tapis URI\ninput_uri = ds.files.to_uri(ds_path, verify_exists=True)" }, { "cell_type": "markdown", @@ -905,22 +891,10 @@ }, { "cell_type": "code", - "execution_count": 57, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/jupyter/MyData/tapis-jobs-archive/2025-06-09Z/314cac7f-bb28-4a56-a440-47374ccd11ce-007\n" - ] - } - ], - "source": [ - "# Get DesignSafe Jupyter path\n", - "archive_path = ds.files.translate_uri_to_path(archive_uri)\n", - "print(archive_path)" - ] + "outputs": [], + "source": "# Get DesignSafe Jupyter path\narchive_path = ds.files.to_path(archive_uri)\nprint(archive_path)" }, { "cell_type": "code", diff --git a/examples/opensees/OpenSeesMP-dapi.ipynb b/examples/opensees/OpenSeesMP-dapi.ipynb index f7c388b..ce3a083 100644 --- a/examples/opensees/OpenSeesMP-dapi.ipynb +++ b/examples/opensees/OpenSeesMP-dapi.ipynb @@ -161,23 +161,8 @@ "execution_count": null, "id": "cell-8", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "DesignSafe path: 
/home/jupyter/MyData/template-notebooks/tapis3/opensees/OpenSeesMP_multiMotion/DS_input\n", - "Translated '/home/jupyter/MyData/template-notebooks/tapis3/opensees/OpenSeesMP_multiMotion/DS_input' to 'tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/opensees/OpenSeesMP_multiMotion/DS_input' using t.username\n", - "Input URI: tapis://designsafe.storage.default/kks32/template-notebooks/tapis3/opensees/OpenSeesMP_multiMotion/DS_input\n" - ] - } - ], - "source": [ - "ds_path = os.getcwd() + \"/DS_input\"\n", - "print(f\"DesignSafe path: {ds_path}\")\n", - "input_uri = ds.files.translate_path_to_uri(ds_path)\n", - "print(f\"Input URI: {input_uri}\")" - ] + "outputs": [], + "source": "ds_path = os.getcwd() + \"/DS_input\"\nprint(f\"DesignSafe path: {ds_path}\")\ninput_uri = ds.files.to_uri(ds_path)\nprint(f\"Input URI: {input_uri}\")" }, { "cell_type": "code", @@ -506,23 +491,11 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "id": "96757814-3a1f-4981-9a3b-f27fc86a553c", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/home/jupyter/MyData/tapis-jobs-archive/2025-06-06Z/414da416-817a-48db-a70a-0fa79b6bc348-007/inputDirectory\n" - ] - } - ], - "source": [ - "# Get DesignSafe Jupyter path\n", - "archive_path = ds.files.translate_uri_to_path(input_dir_archive_uri)\n", - "print(archive_path)" - ] + "outputs": [], + "source": "# Get DesignSafe Jupyter path\narchive_path = ds.files.to_path(input_dir_archive_uri)\nprint(archive_path)" }, { "cell_type": "code", From 8f476db3d80b31c2ea1187f2a97e3cec1f61fe09 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 15:00:20 -0500 Subject: [PATCH 11/21] Update pylauncher notebook sweep example --- examples/pylauncher_sweep.ipynb | 344 ++++++++++++++++++++++++++++++-- 1 file changed, 329 insertions(+), 15 deletions(-) diff --git a/examples/pylauncher_sweep.ipynb b/examples/pylauncher_sweep.ipynb index 8dc985c..c57b861 100644 
--- a/examples/pylauncher_sweep.ipynb +++ b/examples/pylauncher_sweep.ipynb @@ -10,7 +10,12 @@ "PyLauncher task lists and submit sweep jobs on DesignSafe.\n", "\n", "**PyLauncher** runs many independent serial tasks within a single SLURM allocation —\n", - "ideal for parameter studies, Monte Carlo simulations, and batch processing." + "ideal for parameter studies, Monte Carlo simulations, and batch processing.\n", + "\n", + "**What this notebook covers:**\n", + "\n", + "1. **Generic demo** — a minimal `simulate.py` with `--alpha`/`--beta` parameters\n", + "2. **OpenSees demo** — Silvia Mazzoni's cantilever pushover with `--NodalMass`/`--LCol` sweep" ] }, { @@ -28,6 +33,7 @@ "metadata": {}, "outputs": [], "source": [ + "import os\n", "from dapi import DSClient\n", "\n", "ds = DSClient()" @@ -37,16 +43,65 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Part 1: Generic Demo — simulate.py with --alpha / --beta\n", + "---\n", + "\n", + "## Part 1: Generic Demo\n", + "\n", + "A simple example sweeping over two parameters (`--alpha`, `--beta`). The script\n", + "computes `result = alpha * beta`, writes it to a JSON output file, and prints a summary.\n", + "This pattern works with any app — the commands in `runsList.txt` are just shell commands.\n", + "\n", + "### Write the script" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "input_dir_generic = os.path.expanduser(\"~/MyData/pylauncher_demo/\")\n", + "os.makedirs(input_dir_generic, exist_ok=True)\n", + "\n", + "simulate_script = '''\\\n", + "\"\"\"simulate.py — minimal demo script for PyLauncher parameter sweeps.\n", + "\n", + "Accepts --alpha, --beta, and --output via command line.\n", + "Computes result = alpha * beta and writes it to the output directory.\n", + "\"\"\"\n", + "import argparse\n", + "import os\n", + "import json\n", "\n", - "A simple example using the Agnostic App to sweep over two parameters." 
+ "parser = argparse.ArgumentParser()\n", + "parser.add_argument(\"--alpha\", type=float, required=True)\n", + "parser.add_argument(\"--beta\", type=float, required=True)\n", + "parser.add_argument(\"--output\", type=str, required=True)\n", + "args = parser.parse_args()\n", + "\n", + "os.makedirs(args.output, exist_ok=True)\n", + "\n", + "result = args.alpha * args.beta\n", + "summary = {\"alpha\": args.alpha, \"beta\": args.beta, \"result\": result}\n", + "\n", + "outfile = os.path.join(args.output, \"result.json\")\n", + "with open(outfile, \"w\") as f:\n", + " json.dump(summary, f, indent=2)\n", + "\n", + "print(f\"alpha={args.alpha}, beta={args.beta} -> result={result:.4f} written to {outfile}\")\n", + "'''\n", + "\n", + "with open(os.path.join(input_dir_generic, \"simulate.py\"), \"w\") as f:\n", + " f.write(simulate_script)\n", + "\n", + "print(f\"Wrote {input_dir_generic}simulate.py\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 1: Define the parameter sweep" + "### Define the sweep" ] }, { @@ -64,26 +119,82 @@ { "cell_type": "markdown", "metadata": {}, - "source": "### Step 2: Preview all combinations (dry run)" + "source": [ + "### Preview (dry run)" + ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], - "source": "df = ds.jobs.parametric_sweep.generate(\n 'python3 simulate.py --alpha ALPHA --beta BETA',\n sweep,\n preview=True,\n)\nprint(f\"Total runs: {len(df)}\")\ndf" + "source": [ + "ds.jobs.parametric_sweep.generate(\n", + " 'python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA',\n", + " sweep,\n", + " preview=True,\n", + ")" + ] }, { "cell_type": "markdown", "metadata": {}, - "source": "### Step 3: Generate sweep files + Submit\n\nWrites `runsList.txt` and `call_pylauncher.py`, then submits the job.\nUncomment to actually run." 
+ "source": [ + "### Generate sweep files" + ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], - "source": "import os\n\ninput_dir = os.path.expanduser(\"~/MyData/pylauncher_demo/\")\n\n# ds.jobs.parametric_sweep.generate(\n# 'python3 simulate.py --alpha ALPHA --beta BETA '\n# '--output \"$WORK/sweep_$SLURM_JOB_ID/run_ALPHA_BETA\"',\n# sweep,\n# input_dir,\n# debug=\"host+job\",\n# )\n#\n# job = ds.jobs.parametric_sweep.submit(\n# \"/MyData/pylauncher_demo/\",\n# app_id=\"agnostic\",\n# allocation=\"your_allocation\",\n# node_count=1,\n# cores_per_node=48,\n# max_minutes=30,\n# )\n# job.monitor()" + "source": [ + "commands = ds.jobs.parametric_sweep.generate(\n", + " 'python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA',\n", + " sweep,\n", + " input_dir_generic,\n", + " debug=\"host+job\",\n", + ")\n", + "\n", + "print(f\"Generated {len(commands)} task commands\\n\")\n", + "print(\"=== runsList.txt ===\")\n", + "with open(os.path.join(input_dir_generic, \"runsList.txt\")) as f:\n", + " print(f.read())\n", + "\n", + "print(\"=== call_pylauncher.py ===\")\n", + "with open(os.path.join(input_dir_generic, \"call_pylauncher.py\")) as f:\n", + " print(f.read())\n", + "\n", + "print(\"=== Files in input directory ===\")\n", + "for fn in sorted(os.listdir(input_dir_generic)):\n", + " print(f\" {fn}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Submit\n", + "\n", + "Replace `your_allocation` with your TACC allocation and uncomment to run." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# job = ds.jobs.parametric_sweep.submit(\n", + "# \"/MyData/pylauncher_demo/\",\n", + "# app_id=\"agnostic\",\n", + "# allocation=\"your_allocation\",\n", + "# node_count=1,\n", + "# cores_per_node=48,\n", + "# max_minutes=30,\n", + "# )\n", + "# job.monitor()" + ] }, { "cell_type": "markdown", @@ -91,10 +202,38 @@ "source": [ "---\n", "\n", - "## Part 2: OpenSees Demo — Cantilever Pushover with NodalMass Sweep\n", + "## Part 2: OpenSees Cantilever Pushover Sweep\n", "\n", - "A real-world example sweeping structural parameters for a cantilever pushover\n", - "analysis using `openseespy-s3`." + "A real-world example based on Silvia Mazzoni's cantilever pushover analysis.\n", + "We sweep over `NodalMass` and `LCol` (column length) to study how these structural\n", + "parameters affect the pushover response.\n", + "\n", + "The cantilever model:\n", + "```\n", + " ^Y\n", + " |\n", + " 2 __\n", + " | |\n", + " | |\n", + " | |\n", + " (1) LCol\n", + " | |\n", + " | |\n", + " | |\n", + " =1= ---- -------->X\n", + "```\n", + "\n", + "- Node 1: fixed base\n", + "- Node 2: free top with `NodalMass`\n", + "- Elastic beam-column element\n", + "- Gravity load (2000 kip downward) followed by lateral pushover (displacement-controlled)\n", + "\n", + "### Write the analysis script\n", + "\n", + "This is the OpenSeesPy cantilever pushover script adapted from\n", + "[Silvia Mazzoni's example](https://opensees.berkeley.edu/wiki/index.php/Examples_Manual).\n", + "It accepts `--NodalMass`, `--LCol`, and `--outDir` as command-line arguments\n", + "so PyLauncher can run each parameter combination independently." 
] }, { @@ -102,14 +241,189 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "sweep_opensees = {\n \"NODAL_MASS\": [4.19, 4.39, 4.59, 4.79, 4.99],\n \"LCOL\": [100, 200, 300],\n}\n\n# Preview\nds.jobs.parametric_sweep.generate(\n \"python3 cantilever.py --mass NODAL_MASS --lcol LCOL\",\n sweep_opensees,\n preview=True,\n)" + "source": [ + "input_dir_opensees = os.path.expanduser(\"~/MyData/opensees_sweep/\")\n", + "os.makedirs(input_dir_opensees, exist_ok=True)\n", + "\n", + "cantilever_script = '''\\\n", + "# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n", + "# Adapted from Silvia Mazzoni & Frank McKenna, 2006/2020\n", + "# Units: kip, inch, second\n", + "#\n", + "# Command-line arguments (set by PyLauncher per task):\n", + "# --NodalMass mass at free node\n", + "# --LCol column length\n", + "# --outDir output directory for this run\n", + "\n", + "import argparse\n", + "import os\n", + "\n", + "if os.path.exists(\"opensees.so\"):\n", + " import opensees as ops\n", + "else:\n", + " import openseespy.opensees as ops\n", + "\n", + "parser = argparse.ArgumentParser()\n", + "parser.add_argument(\"--NodalMass\", type=float, required=True)\n", + "parser.add_argument(\"--LCol\", type=float, required=True)\n", + "parser.add_argument(\"--outDir\", type=str, required=True)\n", + "args = parser.parse_args()\n", + "\n", + "NodalMass = args.NodalMass\n", + "LCol = args.LCol\n", + "outDir = args.outDir\n", + "\n", + "os.makedirs(outDir, exist_ok=True)\n", + "print(f\"Running: NodalMass={NodalMass}, LCol={LCol}, outDir={outDir}\")\n", + "\n", + "ops.wipe()\n", + "ops.model(\"basic\", \"-ndm\", 2, \"-ndf\", 3)\n", + "\n", + "# Geometry\n", + "ops.node(1, 0, 0)\n", + "ops.node(2, 0, LCol)\n", + "ops.fix(1, 1, 1, 1)\n", + "ops.mass(2, NodalMass, 0.0, 0.0)\n", + "\n", + "# Element\n", + "ops.geomTransf(\"Linear\", 1)\n", + "ops.element(\"elasticBeamColumn\", 1, 1, 2, 3600000000, 4227, 1080000, 1)\n", + "\n", + "# Recorders\n", + "ops.recorder(\"Node\", 
\"-file\", f\"{outDir}/DFree.out\", \"-time\", \"-node\", 2, \"-dof\", 1, 2, 3, \"disp\")\n", + "ops.recorder(\"Node\", \"-file\", f\"{outDir}/RBase.out\", \"-time\", \"-node\", 1, \"-dof\", 1, 2, 3, \"reaction\")\n", + "ops.recorder(\"Element\", \"-file\", f\"{outDir}/FCol.out\", \"-time\", \"-ele\", 1, \"globalForce\")\n", + "\n", + "# Gravity analysis\n", + "ops.timeSeries(\"Linear\", 1)\n", + "ops.pattern(\"Plain\", 1, 1)\n", + "ops.load(2, 0.0, -2000.0, 0.0)\n", + "ops.wipeAnalysis()\n", + "ops.constraints(\"Plain\")\n", + "ops.numberer(\"Plain\")\n", + "ops.system(\"BandGeneral\")\n", + "ops.test(\"NormDispIncr\", 1.0e-8, 6)\n", + "ops.algorithm(\"Newton\")\n", + "ops.integrator(\"LoadControl\", 0.1)\n", + "ops.analysis(\"Static\")\n", + "ops.analyze(10)\n", + "ops.loadConst(\"-time\", 0.0)\n", + "\n", + "# Pushover analysis\n", + "ops.timeSeries(\"Linear\", 2)\n", + "ops.pattern(\"Plain\", 2, 2)\n", + "ops.load(2, 2000.0, 0.0, 0.0)\n", + "ops.integrator(\"DisplacementControl\", 2, 1, 0.1)\n", + "ops.analyze(1000)\n", + "\n", + "print(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n", + "'''\n", + "\n", + "with open(os.path.join(input_dir_opensees, \"cantilever.py\"), \"w\") as f:\n", + " f.write(cantilever_script)\n", + "\n", + "print(f\"Wrote {input_dir_opensees}cantilever.py\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define the sweep" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sweep_opensees = {\n", + " \"NODAL_MASS\": [4.19, 4.39, 4.59, 4.79, 4.99],\n", + " \"LCOL\": [100, 200, 300],\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Preview" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = ds.jobs.parametric_sweep.generate(\n", + " \"python3 cantilever.py --NodalMass NODAL_MASS --LCol LCOL --outDir out_NODAL_MASS_LCOL\",\n", + " 
sweep_opensees,\n", + " preview=True,\n", + ")\n", + "print(f\"Total runs: {len(df)}\")\n", + "df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Generate sweep files" + ] }, { "cell_type": "code", - "source": "# ds.jobs.parametric_sweep.generate(\n# \"python3 cantilever.py --mass NODAL_MASS --lcol LCOL \"\n# \"--outDir out_NODAL_MASS_LCOL\",\n# sweep_opensees,\n# os.path.expanduser(\"~/MyData/opensees_sweep/\"),\n# )\n#\n# job = ds.jobs.parametric_sweep.submit(\n# \"/MyData/opensees_sweep/\",\n# app_id=\"openseespy-s3\",\n# allocation=\"your_allocation\",\n# node_count=2,\n# cores_per_node=48,\n# max_minutes=60,\n# )\n# job.monitor()", + "execution_count": null, "metadata": {}, + "outputs": [], + "source": [ + "commands = ds.jobs.parametric_sweep.generate(\n", + " \"python3 cantilever.py --NodalMass NODAL_MASS --LCol LCOL --outDir out_NODAL_MASS_LCOL\",\n", + " sweep_opensees,\n", + " input_dir_opensees,\n", + ")\n", + "\n", + "print(f\"Generated {len(commands)} task commands\\n\")\n", + "print(\"=== runsList.txt ===\")\n", + "with open(os.path.join(input_dir_opensees, \"runsList.txt\")) as f:\n", + " print(f.read())\n", + "\n", + "print(\"=== call_pylauncher.py ===\")\n", + "with open(os.path.join(input_dir_opensees, \"call_pylauncher.py\")) as f:\n", + " print(f.read())\n", + "\n", + "print(\"=== Files in input directory ===\")\n", + "for fn in sorted(os.listdir(input_dir_opensees)):\n", + " print(f\" {fn}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Submit\n", + "\n", + "Replace `your_allocation` with your TACC allocation and uncomment to run." 
+ ] + }, + { + "cell_type": "code", "execution_count": null, - "outputs": [] + "metadata": {}, + "outputs": [], + "source": [ + "# job = ds.jobs.parametric_sweep.submit(\n", + "# \"/MyData/opensees_sweep/\",\n", + "# app_id=\"openseespy-s3\",\n", + "# allocation=\"your_allocation\",\n", + "# node_count=1,\n", + "# cores_per_node=48,\n", + "# max_minutes=30,\n", + "# )\n", + "# job.monitor()" + ] } ], "metadata": { @@ -125,4 +439,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} From e4ebcb777cbdcf64ac105373ebb9442dda33d5e6 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 16:02:40 -0500 Subject: [PATCH 12/21] Docs --- .github/workflows/docs.yml | 13 +- docs-api/apps.rst | 6 + docs-api/auth.rst | 4 + docs-api/client.rst | 22 ++ docs-api/conf.py | 35 +++ docs-api/database.rst | 12 + docs-api/exceptions.rst | 7 + docs-api/files.rst | 12 + docs-api/index.rst | 18 ++ docs-api/jobs.rst | 34 +++ docs-api/launcher.rst | 4 + docs-api/systems.rst | 12 + docs/api/apps.md | 85 +++++- docs/api/auth.md | 57 +++- docs/api/client.md | 572 +++++++++++++++++++++++++++++++++++-- docs/api/database.md | 187 +++++++++++- docs/api/exceptions.md | 173 +++++++++-- docs/api/files.md | 192 ++++++++++++- docs/api/index.md | 2 +- docs/api/jobs.md | 435 ++++++++++++++++++++++++++-- docs/api/launcher.md | 132 ++++++++- docs/api/systems.md | 181 +++++++++++- docs/installation.md | 2 +- myst.yml | 1 + pyproject.toml | 3 + 25 files changed, 2070 insertions(+), 131 deletions(-) create mode 100644 docs-api/apps.rst create mode 100644 docs-api/auth.rst create mode 100644 docs-api/client.rst create mode 100644 docs-api/conf.py create mode 100644 docs-api/database.rst create mode 100644 docs-api/exceptions.rst create mode 100644 docs-api/files.rst create mode 100644 docs-api/index.rst create mode 100644 docs-api/jobs.rst create mode 100644 docs-api/launcher.rst create mode 100644 docs-api/systems.rst diff --git a/.github/workflows/docs.yml 
b/.github/workflows/docs.yml index 961c86a..23fe657 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -1,4 +1,4 @@ -name: deploy-book +name: deploy-docs on: push: @@ -16,7 +16,7 @@ concurrency: cancel-in-progress: false jobs: - deploy-book: + build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -36,9 +36,14 @@ jobs: - name: Install dependencies run: uv pip install --system ".[docs]" - - name: Build the book + # Build narrative docs (guides, examples) with Jupyter Book / MyST + - name: Build narrative docs run: jupyter-book build --html + # Build API reference with Sphinx + - name: Build API docs + run: sphinx-build -b html docs-api _build/html/api + - name: Upload artifact uses: actions/upload-pages-artifact@v3 with: @@ -49,7 +54,7 @@ jobs: name: github-pages url: ${{ steps.deployment.outputs.page_url }} runs-on: ubuntu-latest - needs: deploy-book + needs: build steps: - name: Deploy to GitHub Pages id: deployment diff --git a/docs-api/apps.rst b/docs-api/apps.rst new file mode 100644 index 0000000..99c99d6 --- /dev/null +++ b/docs-api/apps.rst @@ -0,0 +1,6 @@ +Apps +==== + +.. autofunction:: dapi.apps.find_apps + +.. autofunction:: dapi.apps.get_app_details diff --git a/docs-api/auth.rst b/docs-api/auth.rst new file mode 100644 index 0000000..a3f0fc4 --- /dev/null +++ b/docs-api/auth.rst @@ -0,0 +1,4 @@ +Auth +==== + +.. autofunction:: dapi.auth.init diff --git a/docs-api/client.rst b/docs-api/client.rst new file mode 100644 index 0000000..9e8d9cd --- /dev/null +++ b/docs-api/client.rst @@ -0,0 +1,22 @@ +Client +====== + +.. autoclass:: dapi.client.DSClient + :members: + :undoc-members: + :show-inheritance: + +.. autoclass:: dapi.client.AppMethods + :members: + +.. autoclass:: dapi.client.FileMethods + :members: + +.. autoclass:: dapi.client.JobMethods + :members: + +.. autoclass:: dapi.client.SystemMethods + :members: + +.. 
autoclass:: dapi.client.ParametricSweepMethods + :members: diff --git a/docs-api/conf.py b/docs-api/conf.py new file mode 100644 index 0000000..1dfc8bb --- /dev/null +++ b/docs-api/conf.py @@ -0,0 +1,35 @@ +"""Sphinx configuration for dapi API reference docs.""" + +project = "dapi" +copyright = "2024, Krishna Kumar, Pedro Arduino, Scott Brandenberg" +author = "Krishna Kumar" + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", +] + +# Napoleon settings (Google-style docstrings) +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_init_with_doc = True + +# Autodoc settings +autodoc_member_order = "bysource" +autodoc_typehints = "description" +autodoc_default_options = { + "members": True, + "undoc-members": True, + "show-inheritance": True, +} + +# Theme +html_theme = "furo" +html_title = "dapi API Reference" +html_theme_options = { + "navigation_with_keys": True, +} + +# Suppress warnings for missing type stubs +nitpicky = False diff --git a/docs-api/database.rst b/docs-api/database.rst new file mode 100644 index 0000000..1e2cd70 --- /dev/null +++ b/docs-api/database.rst @@ -0,0 +1,12 @@ +Database +======== + +.. autoclass:: dapi.db.accessor.DatabaseAccessor + :members: + :undoc-members: + :show-inheritance: + +.. autoclass:: dapi.db.db.DSDatabase + :members: + :undoc-members: + :show-inheritance: diff --git a/docs-api/exceptions.rst b/docs-api/exceptions.rst new file mode 100644 index 0000000..0299b1c --- /dev/null +++ b/docs-api/exceptions.rst @@ -0,0 +1,7 @@ +Exceptions +========== + +.. automodule:: dapi.exceptions + :members: + :undoc-members: + :show-inheritance: diff --git a/docs-api/files.rst b/docs-api/files.rst new file mode 100644 index 0000000..45cd2c2 --- /dev/null +++ b/docs-api/files.rst @@ -0,0 +1,12 @@ +Files +===== + +.. autofunction:: dapi.files.get_ds_path_uri + +.. autofunction:: dapi.files.tapis_uri_to_local_path + +.. autofunction:: dapi.files.upload_file + +.. 
autofunction:: dapi.files.download_file + +.. autofunction:: dapi.files.list_files diff --git a/docs-api/index.rst b/docs-api/index.rst new file mode 100644 index 0000000..e2f5030 --- /dev/null +++ b/docs-api/index.rst @@ -0,0 +1,18 @@ +dapi API Reference +================== + +Auto-generated API documentation for the ``dapi`` package. + +.. toctree:: + :maxdepth: 2 + :caption: Modules + + client + jobs + launcher + files + apps + systems + auth + database + exceptions diff --git a/docs-api/jobs.rst b/docs-api/jobs.rst new file mode 100644 index 0000000..75319c2 --- /dev/null +++ b/docs-api/jobs.rst @@ -0,0 +1,34 @@ +Jobs +==== + +Module Functions +---------------- + +.. autofunction:: dapi.jobs.generate_job_request + +.. autofunction:: dapi.jobs.submit_job_request + +.. autofunction:: dapi.jobs.get_job_status + +.. autofunction:: dapi.jobs.get_runtime_summary + +.. autofunction:: dapi.jobs.interpret_job_status + +.. autofunction:: dapi.jobs.list_jobs + +SubmittedJob +------------ + +.. autoclass:: dapi.jobs.SubmittedJob + :members: + :undoc-members: + :show-inheritance: + +Status Constants +---------------- + +.. autodata:: dapi.jobs.STATUS_TIMEOUT +.. autodata:: dapi.jobs.STATUS_INTERRUPTED +.. autodata:: dapi.jobs.STATUS_MONITOR_ERROR +.. autodata:: dapi.jobs.STATUS_UNKNOWN +.. autodata:: dapi.jobs.TAPIS_TERMINAL_STATES diff --git a/docs-api/launcher.rst b/docs-api/launcher.rst new file mode 100644 index 0000000..fbbf851 --- /dev/null +++ b/docs-api/launcher.rst @@ -0,0 +1,4 @@ +Launcher +======== + +.. autofunction:: dapi.launcher.generate_sweep diff --git a/docs-api/systems.rst b/docs-api/systems.rst new file mode 100644 index 0000000..57c50e6 --- /dev/null +++ b/docs-api/systems.rst @@ -0,0 +1,12 @@ +Systems +======= + +.. autofunction:: dapi.systems.list_system_queues + +.. autofunction:: dapi.systems.check_credentials + +.. autofunction:: dapi.systems.establish_credentials + +.. autofunction:: dapi.systems.revoke_credentials + +.. 
autofunction:: dapi.systems.setup_tms_credentials diff --git a/docs/api/apps.md b/docs/api/apps.md index 4ab1d73..d5538d0 100644 --- a/docs/api/apps.md +++ b/docs/api/apps.md @@ -2,14 +2,91 @@ Application discovery and management for DesignSafe computational applications. +All functions below accept an authenticated Tapis client as the first argument. +When using the `DSClient`, the Tapis client is supplied automatically and the +methods are available under `ds.apps`. + +| Module function | Client shorthand | +|---|---| +| `find_apps(t, ...)` | `ds.apps.find(...)` | +| `get_app_details(t, ...)` | `ds.apps.get_details(...)` | + +--- + ## Application Discovery -```{eval-rst} -.. autofunction:: dapi.apps.find_apps +### `find_apps(t, search_term, list_type="ALL", verbose=True)` + +Search for Tapis apps matching a search term. + +Searches through available Tapis applications using partial name matching. +This function helps discover applications available for job submission. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `search_term` (`str`): Name or partial name to search for. Use an empty string for all apps. Supports partial matching with wildcards. +- `list_type` (`str`, optional): Type of apps to list. Must be one of: `"OWNED"`, `"SHARED_PUBLIC"`, `"SHARED_DIRECT"`, `"READ_PERM"`, `"MINE"`, `"ALL"`. Defaults to `"ALL"`. +- `verbose` (`bool`, optional): If `True`, prints a summary of found apps including ID, version, and owner information. Defaults to `True`. + +**Returns:** `List[Tapis]` -- List of matching Tapis app objects with selected fields (`id`, `version`, `owner`). + +**Raises:** + +- `AppDiscoveryError`: If the Tapis API search fails or an unexpected error occurs during the search operation. 
+ +**Example:** + +```python +from dapi.apps import find_apps + +apps = find_apps(client, "matlab", verbose=True) +# Found 3 matching apps: +# - matlab-r2023a (Version: 1.0, Owner: designsafe) +# - matlab-parallel (Version: 2.1, Owner: tacc) +# - matlab-desktop (Version: 1.5, Owner: designsafe) + +# Using DSClient: +apps = ds.apps.find("matlab") ``` +--- + ## Application Details -```{eval-rst} -.. autofunction:: dapi.apps.get_app_details +### `get_app_details(t, app_id, app_version=None, verbose=True)` + +Get detailed information for a specific app ID and version. + +Retrieves comprehensive details about a specific Tapis application, +including job attributes, execution system, and parameter definitions. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `app_id` (`str`): Exact app ID to look up. Must match exactly. +- `app_version` (`Optional[str]`, optional): Specific app version to retrieve. If `None`, fetches the latest available version. Defaults to `None`. +- `verbose` (`bool`, optional): If `True`, prints basic app information including ID, version, owner, execution system, and description. Defaults to `True`. + +**Returns:** `Optional[Tapis]` -- Tapis app object with full details including `jobAttributes`, `parameterSet`, and other configuration. Returns `None` if the app is not found. + +**Raises:** + +- `AppDiscoveryError`: If the Tapis API call fails (except for 404 not found) or an unexpected error occurs during retrieval. 
+ +**Example:** + +```python +from dapi.apps import get_app_details + +app = get_app_details(client, "matlab-r2023a", "1.0") +# App Details: +# ID: matlab-r2023a +# Version: 1.0 +# Owner: designsafe +# Execution System: frontera +# Description: MATLAB R2023a runtime environment + +# Using DSClient: +app = ds.apps.get_details("matlab-r2023a", "1.0") ``` diff --git a/docs/api/auth.md b/docs/api/auth.md index 2b13cc4..e77b831 100644 --- a/docs/api/auth.md +++ b/docs/api/auth.md @@ -4,6 +4,59 @@ Authentication and credential management for DesignSafe access. ## Authentication -```{eval-rst} -.. autofunction:: dapi.auth.init +### `init` + +```python +dapi.auth.init( + base_url: str = "https://designsafe.tapis.io", + username: str = None, + password: str = None, + env_file: str = None, +) -> Tapis ``` + +Initialize and authenticate a Tapis client for DesignSafe. + +Creates and authenticates a Tapis client instance for interacting with DesignSafe resources. The function follows a credential resolution hierarchy and handles secure password input when needed. + +**Credential Resolution Order:** + +1. Explicitly passed `username`/`password` arguments +2. Environment variables (`DESIGNSAFE_USERNAME`, `DESIGNSAFE_PASSWORD`) -- loads from `env_file` if specified, otherwise checks system environment +3. Interactive prompts for missing credentials + +**Parameters:** + +| Name | Type | Default | Description | +|------|------|---------|-------------| +| `base_url` | `str` | `"https://designsafe.tapis.io"` | The Tapis base URL for DesignSafe API endpoints. | +| `username` | `str` | `None` | Explicit DesignSafe username. If `None`, attempts to load from environment or prompts the user. | +| `password` | `str` | `None` | Explicit DesignSafe password. If `None`, attempts to load from environment or prompts the user securely. | +| `env_file` | `str` | `None` | Path to a `.env` file containing credentials. If `None`, attempts to load from a default `.env` file if it exists. 
| + +**Returns:** + +`Tapis` -- An authenticated `tapipy.Tapis` client object ready for API calls. + +**Raises:** + +- {py:class}`~dapi.exceptions.AuthenticationError` -- If authentication fails due to invalid credentials, network issues, or if required credentials cannot be obtained. + +**Examples:** + +```python +# Using explicit credentials +client = init(username="myuser", password="mypass") + +# Using environment variables or .env file +client = init(env_file=".env") + +# Interactive authentication +client = init() +# Enter DesignSafe Username: myuser +# Enter DesignSafe Password: [hidden] +``` + +:::{note} +The function disables automatic spec downloads for faster initialization. Password input uses `getpass` for secure entry in terminal environments. +::: diff --git a/docs/api/client.md b/docs/api/client.md index 72e260c..245ffcf 100644 --- a/docs/api/client.md +++ b/docs/api/client.md @@ -2,11 +2,50 @@ The main client interface for all DAPI functionality. DSClient provides organized access to DesignSafe resources through the Tapis V3 API. -```{eval-rst} -.. autoclass:: dapi.client.DSClient - :members: - :undoc-members: - :show-inheritance: +## DSClient + +### `DSClient(tapis_client=None, **auth_kwargs)` + +Main client for interacting with DesignSafe resources via Tapis V3 using dapi. + +The DSClient provides a high-level interface for working with DesignSafe resources +through the Tapis V3 API. It handles authentication and provides organized access +to different service areas including applications, files, jobs, systems, and databases. + +**Args:** +- `tapis_client` (Tapis, optional): Pre-authenticated Tapis client instance. If provided, it will be used instead of creating a new one. +- `**auth_kwargs`: Additional authentication arguments passed to `auth.init()` when `tapis_client` is not provided. 
Common arguments include: + - `username` (str): DesignSafe username + - `password` (str): DesignSafe password + - `base_url` (str): Tapis base URL + - `env_file` (str): Path to `.env` file with credentials + +**Attributes:** +- `tapis` (Tapis): The underlying authenticated Tapis client instance. +- `apps` (AppMethods): Interface for application discovery and details. +- `files` (FileMethods): Interface for file operations (upload, download, list). +- `jobs` (JobMethods): Interface for job submission and monitoring. +- `systems` (SystemMethods): Interface for system information and queues. +- `db` (DatabaseAccessor): Interface for database connections and queries. + +**Raises:** +- `TypeError`: If `tapis_client` is provided but is not a Tapis instance. +- `AuthenticationError`: If authentication fails when creating a new Tapis client. + +**Example:** + +```python +# Basic usage with automatic authentication +ds = DSClient() + +# Using explicit credentials +ds = DSClient(username="myuser", password="mypass") + +# Using a pre-authenticated Tapis client +from tapipy.tapis import Tapis +tapis = Tapis(base_url="https://designsafe.tapis.io", ...) +tapis.get_tokens() +ds = DSClient(tapis_client=tapis) ``` ## Accessing the Raw Tapis Client @@ -31,7 +70,7 @@ jobs = tapis_client.jobs.getJobList() ### When to Use the Raw Tapis Client - Access Tapis APIs not yet wrapped by dapi -- Use advanced search parameters not exposed by dapi +- Use advanced search parameters not exposed by dapi - Implement custom functionality - Debug or troubleshoot API calls - Access experimental or new Tapis features @@ -46,32 +85,519 @@ The DSClient provides access to different DesignSafe services through specialize ### AppMethods -```{eval-rst} -.. autoclass:: dapi.client.AppMethods - :members: - :undoc-members: +Interface for Tapis application discovery and details retrieval. + +#### `find(search_term, list_type="ALL", verbose=True)` + +Search for Tapis apps matching a search term. 
+ +**Args:** +- `search_term` (str): Name or partial name to search for. Use empty string for all apps. Supports partial matching with wildcards. +- `list_type` (str, optional): Type of apps to list. Must be one of: `"OWNED"`, `"SHARED_PUBLIC"`, `"SHARED_DIRECT"`, `"READ_PERM"`, `"MINE"`, `"ALL"`. Defaults to `"ALL"`. +- `verbose` (bool, optional): If `True`, prints summary of found apps including ID, version, and owner information. Defaults to `True`. + +**Returns:** `List[Tapis]` -- List of matching Tapis app objects with selected fields (id, version, owner). + +**Raises:** +- `AppDiscoveryError`: If the Tapis API search fails. + +**Example:** + +```python +apps = ds.apps.find("matlab") +# Found 3 matching apps: +# - matlab-r2023a (Version: 1.0, Owner: designsafe) +# - matlab-parallel (Version: 2.1, Owner: tacc) ``` +--- + +#### `get_details(app_id, app_version=None, verbose=True)` + +Get detailed information for a specific app ID and version. + +**Args:** +- `app_id` (str): Exact app ID to look up. Must match exactly. +- `app_version` (str, optional): Specific app version to retrieve. If `None`, fetches the latest available version. Defaults to `None`. +- `verbose` (bool, optional): If `True`, prints basic app information including ID, version, owner, execution system, and description. Defaults to `True`. + +**Returns:** `Optional[Tapis]` -- Tapis app object with full details including jobAttributes, parameterSet, and other configuration. Returns `None` if the app is not found. + +**Raises:** +- `AppDiscoveryError`: If the Tapis API call fails (except for 404 not found). + +**Example:** + +```python +app = ds.apps.get_details("matlab-r2023a", "1.0") +# App Details: +# ID: matlab-r2023a +# Version: 1.0 +# Execution System: frontera +``` + +--- + ### FileMethods -```{eval-rst} -.. autoclass:: dapi.client.FileMethods - :members: - :undoc-members: +Interface for file operations on Tapis storage systems. 
+ +#### `to_uri(path, verify_exists=False)` + +Translate DesignSafe-style paths to Tapis URIs. + +**Args:** +- `path` (str): The DesignSafe-style path string to translate. Supported formats: + - MyData paths: `"/MyData/folder"`, `"jupyter/MyData/folder"` + - Community paths: `"/CommunityData/folder"` + - Project paths: `"/projects/PRJ-XXXX/folder"` + - Direct Tapis URIs: `"tapis://system-id/path"` +- `verify_exists` (bool, optional): If `True`, verifies the translated path exists on the target Tapis system. Defaults to `False`. + +**Returns:** `str` -- The corresponding Tapis URI (e.g., `"tapis://system-id/path"`). + +**Raises:** +- `FileOperationError`: If path translation fails or verification fails. +- `AuthenticationError`: If username is required for MyData paths but not available. +- `ValueError`: If the input path format is unrecognized. + +**Example:** + +```python +uri = ds.files.to_uri("/MyData/analysis/results") +# "tapis://designsafe.storage.default/username/analysis/results" + +uri = ds.files.to_uri("/projects/PRJ-1234/data", verify_exists=True) +``` + +--- + +#### `to_path(tapis_uri)` + +Translate Tapis URIs to DesignSafe local paths. + +**Args:** +- `tapis_uri` (str): The Tapis URI to convert. Supported formats: + - `"tapis://designsafe.storage.default/username/path"` -> `"/home/jupyter/MyData/path"` + - `"tapis://designsafe.storage.community/path"` -> `"/home/jupyter/CommunityData/path"` + - `"tapis://project-*/path"` -> `"/home/jupyter/MyProjects/path"` + +**Returns:** `str` -- The corresponding DesignSafe local path, or the original URI if it is not a recognized format. + +**Example:** + +```python +local_path = ds.files.to_path("tapis://designsafe.storage.default/user/data/file.txt") +# "/home/jupyter/MyData/data/file.txt" ``` +--- + +#### `upload(local_path, remote_uri)` + +Upload a local file to a Tapis storage system. + +**Args:** +- `local_path` (str): Path to the local file to upload. 
+- `remote_uri` (str): Tapis URI destination (e.g., `"tapis://system/path/file.txt"`). + +**Raises:** +- `FileNotFoundError`: If the local file does not exist. +- `ValueError`: If `local_path` is not a file or `remote_uri` is invalid. +- `FileOperationError`: If the Tapis upload operation fails. + +**Example:** + +```python +ds.files.upload("/local/data.txt", "tapis://mysystem/uploads/data.txt") +``` + +--- + +#### `download(remote_uri, local_path)` + +Download a file from a Tapis storage system to the local filesystem. + +**Args:** +- `remote_uri` (str): Tapis URI of the file to download (e.g., `"tapis://system/path/file.txt"`). +- `local_path` (str): Local filesystem path where the file should be saved. + +**Raises:** +- `ValueError`: If `local_path` is a directory or `remote_uri` is invalid. +- `FileOperationError`: If the download operation fails. + +**Example:** + +```python +ds.files.download("tapis://mysystem/data/results.txt", "/local/results.txt") +``` + +--- + +#### `list(remote_uri, limit=100, offset=0)` + +List files and directories in a Tapis storage system path. + +**Args:** +- `remote_uri` (str): Tapis URI of the directory to list (e.g., `"tapis://system/path/"`). +- `limit` (int, optional): Maximum number of items to return. Defaults to `100`. +- `offset` (int, optional): Number of items to skip (for pagination). Defaults to `0`. + +**Returns:** `List[Tapis]` -- List of file and directory objects from the specified path. Each object contains metadata like name, size, type, and permissions. + +**Raises:** +- `ValueError`: If `remote_uri` is invalid. +- `FileOperationError`: If the listing operation fails or path not found. + +**Example:** + +```python +files = ds.files.list("tapis://mysystem/data/") +for f in files: + print(f"{f.name} ({f.type})") +``` + +--- + ### JobMethods -```{eval-rst} -.. autoclass:: dapi.client.JobMethods - :members: - :undoc-members: +Interface for Tapis job submission, monitoring, and management. 
+ +**Attributes:** +- `parametric_sweep` (ParametricSweepMethods): Interface for PyLauncher parameter sweep generation and submission. + +#### `generate(app_id, input_dir_uri, *, script_filename=None, app_version=None, job_name=None, description=None, tags=None, max_minutes=None, node_count=None, cores_per_node=None, memory_mb=None, queue=None, allocation=None, archive_system=None, archive_path=None, extra_file_inputs=None, extra_app_args=None, extra_env_vars=None, extra_scheduler_options=None, script_param_names=["Input Script", "Main Script", "tclScript"], input_dir_param_name="Input Directory", allocation_param_name="TACC Allocation")` + +Generate a Tapis job request dictionary based on app definition and inputs. Automatically retrieves app details and applies user-specified overrides and extra parameters. + +**Args:** +- `app_id` (str): The ID of the Tapis application to use for the job. +- `input_dir_uri` (str): Tapis URI to the input directory containing job files. +- `script_filename` (str, optional): Name of the main script file to execute. If `None`, no script parameter is added (suitable for apps like OpenFOAM). +- `app_version` (str, optional): Specific app version. If `None`, uses latest. +- `job_name` (str, optional): Custom job name. If `None`, auto-generates one. +- `description` (str, optional): Job description. If `None`, uses app description. +- `tags` (List[str], optional): List of tags to associate with the job. +- `max_minutes` (int, optional): Maximum runtime in minutes. Overrides app default. +- `node_count` (int, optional): Number of compute nodes. Overrides app default. +- `cores_per_node` (int, optional): Cores per node. Overrides app default. +- `memory_mb` (int, optional): Memory in MB. Overrides app default. +- `queue` (str, optional): Execution queue name. Overrides app default. +- `allocation` (str, optional): TACC allocation to charge for compute time. +- `archive_system` (str, optional): Archive system for job outputs. 
Use `"designsafe"` for `designsafe.storage.default`. If `None`, uses app default. +- `archive_path` (str, optional): Archive directory path. Can be a full path or just a directory name in MyData. If `None` and `archive_system` is `"designsafe"`, defaults to `"tapis-jobs-archive/${JobCreateDate}/${JobUUID}"`. +- `extra_file_inputs` (List[Dict], optional): Additional file inputs beyond the main input directory. +- `extra_app_args` (List[Dict], optional): Additional application arguments. +- `extra_env_vars` (List[Dict], optional): Additional environment variables. Each item should be `{"key": "VAR_NAME", "value": "var_value"}`. +- `extra_scheduler_options` (List[Dict], optional): Additional scheduler options. +- `script_param_names` (List[str], optional): Parameter names to check for script placement. Defaults to `["Input Script", "Main Script", "tclScript"]`. +- `input_dir_param_name` (str, optional): Parameter name for input directory. Defaults to `"Input Directory"`. +- `allocation_param_name` (str, optional): Parameter name for allocation. Defaults to `"TACC Allocation"`. + +**Returns:** `Dict[str, Any]` -- Complete job request dictionary ready for submission. + +**Raises:** +- `AppDiscoveryError`: If the specified app cannot be found. +- `ValueError`: If required parameters are missing or invalid. +- `JobSubmissionError`: If job request generation fails. + +**Example:** + +```python +job_request = ds.jobs.generate( + app_id="matlab-r2023a", + input_dir_uri="tapis://designsafe.storage.default/username/input/", + script_filename="run_analysis.m", + max_minutes=120, + allocation="MyProject-123", +) +``` + +--- + +#### `submit(job_request)` + +Submit a job request dictionary to Tapis. + +**Args:** +- `job_request` (Dict[str, Any]): Complete job request dictionary (typically from `generate()`). + +**Returns:** `SubmittedJob` -- A SubmittedJob object for monitoring and managing the job. + +**Raises:** +- `ValueError`: If `job_request` is not a dictionary. 
+- `JobSubmissionError`: If the Tapis submission fails. + +**Example:** + +```python +job_request = ds.jobs.generate(...) +job = ds.jobs.submit(job_request) +print(f"Job submitted with UUID: {job.uuid}") +``` + +--- + +#### `job(job_uuid)` + +Get a SubmittedJob object for an existing job by UUID. + +**Args:** +- `job_uuid` (str): The UUID of an existing Tapis job. + +**Returns:** `SubmittedJob` -- A job object for monitoring via `.monitor()`. + +**Example:** + +```python +job = ds.jobs.job("12345678-1234-1234-1234-123456789abc") +job.monitor() +``` + +--- + +#### `status(job_uuid)` + +Get the current status of a job by UUID. + +**Args:** +- `job_uuid` (str): The UUID of the job to check. + +**Returns:** `str` -- The current job status (e.g., `"QUEUED"`, `"RUNNING"`, `"FINISHED"`). + +**Raises:** +- `JobMonitorError`: If status retrieval fails. + +**Example:** + +```python +ds.jobs.status("12345678-1234-1234-1234-123456789abc") +# 'FINISHED' ``` +--- + +#### `runtime_summary(job_uuid, verbose=False)` + +Print the runtime summary for a job by UUID. + +**Args:** +- `job_uuid` (str): The UUID of the job to analyze. +- `verbose` (bool, optional): If `True`, prints detailed job history events. Defaults to `False`. + +**Example:** + +```python +ds.jobs.runtime_summary("12345678-1234-1234-1234-123456789abc") +# Runtime Summary +# --------------- +# QUEUED time: 00:05:30 +# RUNNING time: 01:23:45 +# TOTAL time: 01:29:15 +``` + +--- + +#### `interpret_status(final_status, job_uuid=None)` + +Print a user-friendly interpretation of a job status. + +**Args:** +- `final_status` (str): The job status to interpret. +- `job_uuid` (str, optional): The job UUID for context in the message. + +**Example:** + +```python +ds.jobs.interpret_status("FINISHED", "12345678-1234-1234-1234-123456789abc") +# Job 12345678-1234-1234-1234-123456789abc completed successfully. 
+``` + +--- + +#### `list(app_id=None, status=None, limit=100, output="df", verbose=False)` + +List jobs with optional filtering. Fetches jobs from Tapis ordered by creation date (newest first). Filters are applied client-side. + +**Args:** +- `app_id` (str, optional): Filter by application ID. +- `status` (str, optional): Filter by job status (e.g., `"FINISHED"`). Case-insensitive. +- `limit` (int, optional): Maximum jobs to fetch. Defaults to `100`. +- `output` (str, optional): Output format. `"df"` for pandas DataFrame (default), `"list"` for list of dicts, `"raw"` for TapisResult objects. +- `verbose` (bool, optional): Print job count. Defaults to `False`. + +**Returns:** Depends on `output`: DataFrame, list of dicts, or list of TapisResult objects. + +**Raises:** +- `JobMonitorError`: If the Tapis API call fails. +- `ValueError`: If output format is not recognized. + +**Example:** + +```python +df = ds.jobs.list(app_id="matlab-r2023a", status="FINISHED") +jobs = ds.jobs.list(output="list") +raw = ds.jobs.list(limit=10, output="raw") +``` + +--- + ### SystemMethods -```{eval-rst} -.. autoclass:: dapi.client.SystemMethods - :members: - :undoc-members: -``` \ No newline at end of file +Interface for Tapis system information and queue management. + +#### `queues(system_id, verbose=True)` + +List logical queues available on a Tapis execution system. + +**Args:** +- `system_id` (str): The ID of the execution system (e.g., `"frontera"`). +- `verbose` (bool, optional): If `True`, prints detailed queue information. Defaults to `True`. + +**Returns:** `List[Any]` -- List of queue objects with queue configuration details. + +**Raises:** +- `SystemInfoError`: If the system is not found or queue retrieval fails. +- `ValueError`: If `system_id` is empty. + +**Example:** + +```python +queues = ds.systems.queues("frontera") +``` + +--- + +#### `check_credentials(system_id, username=None)` + +Check whether TMS credentials exist for a user on a system. 
+ +**Args:** +- `system_id` (str): The ID of the Tapis system (e.g., `"frontera"`). +- `username` (str, optional): Username to check. Defaults to the authenticated user. + +**Returns:** `bool` -- `True` if credentials exist, `False` otherwise. + +**Raises:** +- `CredentialError`: If the credential check fails unexpectedly. +- `ValueError`: If `system_id` is empty. + +**Example:** + +```python +has_creds = ds.systems.check_credentials("frontera") +``` + +--- + +#### `establish_credentials(system_id, username=None, force=False, verbose=True)` + +Establish TMS credentials for a user on a Tapis system. Idempotent: skips creation if credentials already exist (unless `force=True`). Only supported for systems using TMS_KEYS authentication. + +**Args:** +- `system_id` (str): The ID of the Tapis system (e.g., `"frontera"`). +- `username` (str, optional): Username. Defaults to the authenticated user. +- `force` (bool, optional): Re-create even if credentials exist. Defaults to `False`. +- `verbose` (bool, optional): Print status messages. Defaults to `True`. + +**Raises:** +- `CredentialError`: If the system is not TMS_KEYS or creation fails. +- `ValueError`: If `system_id` is empty. + +**Example:** + +```python +ds.systems.establish_credentials("frontera") +``` + +--- + +#### `revoke_credentials(system_id, username=None, verbose=True)` + +Remove TMS credentials for a user on a Tapis system. Idempotent: succeeds silently if credentials do not exist. + +**Args:** +- `system_id` (str): The ID of the Tapis system (e.g., `"frontera"`). +- `username` (str, optional): Username. Defaults to the authenticated user. +- `verbose` (bool, optional): Print status messages. Defaults to `True`. + +**Raises:** +- `CredentialError`: If credential removal fails unexpectedly. +- `ValueError`: If `system_id` is empty. + +**Example:** + +```python +ds.systems.revoke_credentials("frontera") +``` + +--- + +### ParametricSweepMethods + +Interface for PyLauncher parameter sweeps. 
Accessible via `ds.jobs.parametric_sweep`. + +#### `generate(command, sweep, directory=None, *, placeholder_style="token", debug=None, preview=False)` + +Generate PyLauncher sweep files or preview the parameter grid. + +With `preview=True`, returns a DataFrame of all parameter combinations -- no files are written. Otherwise, expands `command` into one command per combination and writes `runsList.txt` and `call_pylauncher.py` into `directory`. + +**Args:** +- `command` (str): Command template with placeholders matching sweep keys. +- `sweep` (Dict[str, Any]): Mapping of placeholder name to sequence of values. +- `directory` (str, optional): Directory to write files into (created if needed). Required when `preview` is `False`. +- `placeholder_style` (str, optional): `"token"` (default) for bare `ALPHA`, or `"braces"` for `{ALPHA}`. +- `debug` (str, optional): Optional debug string (e.g., `"host+job"`). +- `preview` (bool, optional): If `True`, return a DataFrame (dry run). + +**Returns:** `List[str]` of commands, or `pandas.DataFrame` when `preview` is `True`. + +**Example:** + +```python +# Preview the parameter grid +df = ds.jobs.parametric_sweep.generate( + command="python run.py --alpha ALPHA --beta BETA", + sweep={"ALPHA": [0.1, 0.5], "BETA": [1, 2]}, + preview=True, +) + +# Generate sweep files +commands = ds.jobs.parametric_sweep.generate( + command="python run.py --alpha ALPHA --beta BETA", + sweep={"ALPHA": [0.1, 0.5], "BETA": [1, 2]}, + directory="/MyData/sweep/", +) +``` + +--- + +#### `submit(directory, app_id, allocation, *, node_count=None, cores_per_node=None, max_minutes=None, queue=None, **kwargs)` + +Submit a PyLauncher sweep job. Translates `directory` to a Tapis URI, builds a job request with `call_pylauncher.py` as the script, and submits it. + +**Args:** +- `directory` (str): Path to the input directory containing `runsList.txt` and `call_pylauncher.py` (e.g., `"/MyData/sweep/"`). 
+- `app_id` (str): Tapis application ID (e.g., `"openseespy-s3"`). +- `allocation` (str): TACC allocation to charge. +- `node_count` (int, optional): Number of compute nodes. +- `cores_per_node` (int, optional): Cores per node. +- `max_minutes` (int, optional): Maximum runtime in minutes. +- `queue` (str, optional): Execution queue name. +- `**kwargs`: Additional arguments passed to `ds.jobs.generate()`. + +**Returns:** `SubmittedJob` -- A job object for monitoring via `.monitor()`. + +**Example:** + +```python +job = ds.jobs.parametric_sweep.submit( + directory="/MyData/sweep/", + app_id="openseespy-s3", + allocation="MyProject-123", + node_count=2, + max_minutes=60, +) +job.monitor() +``` diff --git a/docs/api/database.md b/docs/api/database.md index 3321f6f..1a6976c 100644 --- a/docs/api/database.md +++ b/docs/api/database.md @@ -4,24 +4,187 @@ Database connections and query execution for DesignSafe research databases. ## Database Accessor -```{eval-rst} -.. autoclass:: dapi.db.accessor.DatabaseAccessor - :members: - :undoc-members: - :show-inheritance: +### `DatabaseAccessor` + +```python +class dapi.db.accessor.DatabaseAccessor +``` + +Provides lazy access to different DesignSafe database connections via properties. + +This class manages multiple database connections and provides convenient property-based access to different DesignSafe databases. Each database connection is created only when first accessed (lazy initialization) and reused for subsequent calls. + +**Constructor:** + +```python +DatabaseAccessor() +``` + +Initializes the accessor with empty connection slots. No database connections are established until a property is first accessed. + +**Properties:** + +#### `ngl` + +```python +DatabaseAccessor.ngl -> DSDatabase ``` +Access the NGL (Natural Hazards Engineering) database connection manager. Provides access to the `sjbrande_ngl_db` database containing natural hazards engineering research data. The connection is created on first access. 
+ +#### `vp` + +```python +DatabaseAccessor.vp -> DSDatabase +``` + +Access the VP (Vulnerability and Performance) database connection manager. Provides access to the `sjbrande_vpdb` database containing vulnerability and performance analysis data. The connection is created on first access. + +#### `eq` + +```python +DatabaseAccessor.eq -> DSDatabase +``` + +Access the EQ (Post-Earthquake Recovery) database connection manager. Provides access to the `post_earthquake_recovery` database containing post-earthquake recovery research data. The connection is created on first access. + +**Methods:** + +#### `close_all` + +```python +DatabaseAccessor.close_all() -> None +``` + +Close all active database engines and their connection pools. This should be called when the `DatabaseAccessor` is no longer needed to prevent connection leaks. + +After calling `close_all()`, accessing any database property will create new connections since the instances are reset to `None`. + +**Example:** + +```python +accessor = DatabaseAccessor() + +# Access NGL database (created on first access) +ngl_db = accessor.ngl + +# Query the database +results = ngl_db.read_sql("SELECT COUNT(*) as total FROM users") + +# Close all connections when done +accessor.close_all() +``` + +--- + ## Database Engine -```{eval-rst} -.. autoclass:: dapi.db.db.DSDatabase - :members: - :undoc-members: - :show-inheritance: +### `DSDatabase` + +```python +class dapi.db.db.DSDatabase(dbname: str = "ngl") +``` + +Manages connection and querying for a specific DesignSafe database. + +Provides a high-level interface for connecting to preconfigured DesignSafe databases using SQLAlchemy with connection pooling. It supports environment-based configuration and provides query results in multiple formats. + +**Constructor Parameters:** + +| Name | Type | Default | Description | +|------|------|---------|-------------| +| `dbname` | `str` | `"ngl"` | Shorthand name for the database to connect to. 
Must be one of `"ngl"`, `"vp"`, or `"eq"`. | + +**Raises:** + +- `ValueError` -- If `dbname` is not a valid configured database name. +- `SQLAlchemyError` -- If database engine creation or connection fails. + +**Attributes:** + +| Name | Type | Description | +|------|------|-------------| +| `user` | `str` | Database username for authentication. | +| `password` | `str` | Database password for authentication. | +| `host` | `str` | Database host address. | +| `port` | `int` | Database port number. | +| `db` | `str` | Name of the connected database. | +| `dbname_short` | `str` | Shorthand name for the database. | +| `engine` | `sqlalchemy.Engine` | SQLAlchemy engine for database connections. | +| `Session` | `sqlalchemy.orm.sessionmaker` | Session factory for database operations. | + +**Methods:** + +#### `read_sql` + +```python +DSDatabase.read_sql(sql: str, output_type: str = "DataFrame") -> pd.DataFrame | list[dict] ``` +Execute a SQL query using a dedicated session and return the results. + +Obtains a session from the connection pool, executes the provided SQL query, and returns results in the specified format. The session is automatically closed after execution, returning the connection to the pool. + +| Name | Type | Default | Description | +|------|------|---------|-------------| +| `sql` | `str` | *(required)* | The SQL query string to execute. | +| `output_type` | `str` | `"DataFrame"` | Format for query results. Must be `"DataFrame"` for a `pandas.DataFrame` or `"dict"` for a list of dictionaries. | + +**Returns:** + +- `pandas.DataFrame` when `output_type="DataFrame"` -- a DataFrame with column names as headers. +- `list[dict]` when `output_type="dict"` -- a list of dictionaries where each dict represents a row. + +**Raises:** + +- `ValueError` -- If `sql` is empty/`None` or `output_type` is not `"DataFrame"` or `"dict"`. +- `SQLAlchemyError` -- If a database error occurs during query execution. 
+ +#### `close` + +```python +DSDatabase.close() -> None +``` + +Dispose of the SQLAlchemy engine and close all database connections. + +Properly shuts down the database engine and its connection pool. Call this when the database instance is no longer needed to prevent connection leaks and free up database resources. + +After calling `close()`, this `DSDatabase` instance should not be used for further database operations as the engine will be disposed. + +**Example:** + +```python +db = DSDatabase("ngl") +df = db.read_sql("SELECT * FROM table_name LIMIT 5") + +# Get dictionary results +results = db.read_sql("SELECT COUNT(*) as total FROM users", output_type="dict") + +db.close() +``` + +--- + ## Database Configuration -```{eval-rst} -.. autodata:: dapi.db.config.db_config +### `db_config` + +```python +dapi.db.config.db_config: dict ``` + +A dictionary mapping shorthand database names to their configuration details. + +| Key | Database Name | Env Prefix | Description | +|-----|---------------|------------|-------------| +| `"ngl"` | `sjbrande_ngl_db` | `NGL_` | Natural hazards engineering research database | +| `"vp"` | `sjbrande_vpdb` | `VP_` | Vulnerability and performance database | +| `"eq"` | `post_earthquake_recovery` | `EQ_` | Post-earthquake recovery database | + +For each database, the following environment variables are checked (using the env prefix): + +- `{PREFIX}DB_USER` -- Database username (default: `"dspublic"`) +- `{PREFIX}DB_PASSWORD` -- Database password (default: `"R3ad0nlY"`) +- `{PREFIX}DB_HOST` -- Database host (default: `"129.114.52.174"`) +- `{PREFIX}DB_PORT` -- Database port (default: `3306`) diff --git a/docs/api/exceptions.md b/docs/api/exceptions.md index c03ba67..f8afeae 100644 --- a/docs/api/exceptions.md +++ b/docs/api/exceptions.md @@ -2,60 +2,175 @@ Custom exception classes for DAPI error handling and debugging. 
+## Exception Hierarchy + +All exceptions inherit from Python's built-in `Exception` via `DapiException`: + +``` +Exception + └── DapiException + ├── AuthenticationError + ├── FileOperationError + ├── AppDiscoveryError + ├── SystemInfoError + ├── CredentialError + ├── JobSubmissionError + └── JobMonitorError +``` + +You can catch `DapiException` to handle any dapi-specific error, or catch a more specific subclass for targeted error handling. + +--- + ## Base Exception -```{eval-rst} -.. autoclass:: dapi.exceptions.DapiException - :members: - :show-inheritance: +### `DapiException` + +```python +class dapi.exceptions.DapiException(message: str) ``` +Base exception class for all dapi-related errors. + +This is the parent class for all custom exceptions in the dapi library. It can be used to catch any dapi-specific error or as a base for creating new custom exceptions. + +**Parameters:** + +| Name | Type | Description | +|------|------|-------------| +| `message` | `str` | Human-readable description of the error. | + +--- + ## Authentication Exceptions -```{eval-rst} -.. autoclass:: dapi.exceptions.AuthenticationError - :members: - :show-inheritance: +### `AuthenticationError` + +```python +class dapi.exceptions.AuthenticationError(message: str) ``` +*Inherits from {py:class}`~dapi.exceptions.DapiException`.* + +Raised when authentication with Tapis fails. This includes invalid credentials, network connectivity problems, or Tapis service unavailability. + +**Raised by:** `dapi.auth.init()` when credentials are invalid, missing, or the Tapis service is unreachable. + +--- + ## File Operation Exceptions -```{eval-rst} -.. 
autoclass:: dapi.exceptions.FileOperationError - :members: - :show-inheritance: +### `FileOperationError` + +```python +class dapi.exceptions.FileOperationError(message: str) ``` +*Inherits from {py:class}`~dapi.exceptions.DapiException`.* + +Raised when file operations fail, including uploads, downloads, directory listings, path translations, and file existence checks. + +--- + ## Application Discovery Exceptions -```{eval-rst} -.. autoclass:: dapi.exceptions.AppDiscoveryError - :members: - :show-inheritance: +### `AppDiscoveryError` + +```python +class dapi.exceptions.AppDiscoveryError(message: str) ``` +*Inherits from {py:class}`~dapi.exceptions.DapiException`.* + +Raised when searching for Tapis applications fails, when a specific application cannot be found, or when retrieving application details encounters an error. + +--- + ## System Information Exceptions -```{eval-rst} -.. autoclass:: dapi.exceptions.SystemInfoError - :members: - :show-inheritance: +### `SystemInfoError` + +```python +class dapi.exceptions.SystemInfoError(message: str) ``` +*Inherits from {py:class}`~dapi.exceptions.DapiException`.* + +Raised when operations involving Tapis execution systems fail, such as retrieving system details, listing available queues, or checking system availability. + +--- + ## Credential Management Exceptions -::: dapi.exceptions.CredentialError +### `CredentialError` + +```python +class dapi.exceptions.CredentialError(message: str) +``` + +*Inherits from {py:class}`~dapi.exceptions.DapiException`.* + +Raised when credential management operations involving Tapis Managed Secrets (TMS) fail, such as checking, establishing, or revoking user credentials on a Tapis execution system. + +--- ## Job Management Exceptions -```{eval-rst} -.. 
autoclass:: dapi.exceptions.JobSubmissionError - :members: - :show-inheritance: +### `JobSubmissionError` + +```python +class dapi.exceptions.JobSubmissionError(message: str, request=None, response=None) +``` + +*Inherits from {py:class}`~dapi.exceptions.DapiException`.* + +Raised when job submission or validation fails. This includes errors during job request generation, validation, or submission to Tapis. It carries additional context about the HTTP request and response when available. + +**Parameters:** + +| Name | Type | Default | Description | +|------|------|---------|-------------| +| `message` | `str` | *(required)* | Description of the job submission failure. | +| `request` | `requests.Request` | `None` | The HTTP request object that failed. | +| `response` | `requests.Response` | `None` | The HTTP response object received. | + +**Attributes:** + +| Name | Type | Description | +|------|------|-------------| +| `request` | `requests.Request` | The failed HTTP request, if available. | +| `response` | `requests.Response` | The HTTP response received, if available. | + +The string representation includes request URL, method, response status code, and response body when available. + +**Example:** + +```python +try: + job = client.jobs.submit(job_request) +except JobSubmissionError as e: + print(f"Job submission failed: {e}") + if e.response: + print(f"Status code: {e.response.status_code}") ``` -```{eval-rst} -.. autoclass:: dapi.exceptions.JobMonitorError - :members: - :show-inheritance: +--- + +### `JobMonitorError` + +```python +class dapi.exceptions.JobMonitorError(message: str) +``` + +*Inherits from {py:class}`~dapi.exceptions.DapiException`.* + +Raised when job monitoring or management fails, including errors during job status monitoring, job cancellation, retrieving job details, or accessing job outputs. 
+ +**Example:** + +```python +try: + status = job.monitor(timeout_minutes=60) +except JobMonitorError as e: + print(f"Job monitoring failed: {e}") ``` diff --git a/docs/api/files.md b/docs/api/files.md index 1309756..3c28de1 100644 --- a/docs/api/files.md +++ b/docs/api/files.md @@ -2,26 +2,198 @@ File operations and path translation utilities for DesignSafe storage systems. +All functions below accept an authenticated Tapis client as the first argument. +When using the `DSClient`, the Tapis client is supplied automatically and the +methods are available under `ds.files`. + +| Module function | Client shorthand | +|---|---| +| `get_ds_path_uri(t, ...)` | `ds.files.to_uri(...)` | +| `tapis_uri_to_local_path(...)` | `ds.files.to_path(...)` | +| `upload_file(t, ...)` | `ds.files.upload(...)` | +| `download_file(t, ...)` | `ds.files.download(...)` | +| `list_files(t, ...)` | `ds.files.list(...)` | + +--- + ## Path Translation -```{eval-rst} -.. autofunction:: dapi.files.get_ds_path_uri +### `get_ds_path_uri(t, path, verify_exists=False)` + +Translate DesignSafe-style paths to Tapis URIs. + +Converts commonly used DesignSafe path formats (e.g., `/MyData/folder`, +`/projects/PRJ-XXXX/folder`) to their corresponding Tapis system URIs. +Supports MyData, CommunityData, and project-specific paths with automatic +system discovery for projects. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `path` (`str`): The DesignSafe-style path string to translate. Supported formats: + - MyData paths: `"/MyData/folder"`, `"jupyter/MyData/folder"`, `"/home/jupyter/MyData/folder"` + - Community paths: `"/CommunityData/folder"` + - Project paths: `"/projects/PRJ-XXXX/folder"` + - Direct Tapis URIs: `"tapis://system-id/path"` (passed through) +- `verify_exists` (`bool`, optional): If `True`, verifies the translated path exists on the target Tapis system. Defaults to `False`. + +**Returns:** `str` -- The corresponding Tapis URI (e.g., `"tapis://system-id/path"`). 
+ +**Raises:** + +- `FileOperationError`: If path translation fails, project system lookup fails, or path verification fails (when `verify_exists=True`). +- `AuthenticationError`: If username is required for MyData paths but `t.username` is not available. +- `ValueError`: If the input path format is unrecognized, empty, or incomplete. + +**Example:** + +```python +from dapi.files import get_ds_path_uri + +uri = get_ds_path_uri(client, "/MyData/analysis/results") +# Translated '/MyData/analysis/results' to +# 'tapis://designsafe.storage.default/username/analysis/results' + +uri = get_ds_path_uri(client, "/projects/PRJ-1234/data", verify_exists=True) + +# Using DSClient: +uri = ds.files.to_uri("/MyData/analysis/results") ``` -```{eval-rst} -.. autofunction:: dapi.files.tapis_uri_to_local_path +--- + +### `tapis_uri_to_local_path(tapis_uri)` + +Convert a Tapis URI to the corresponding DesignSafe local path. + +This is the reverse operation of `get_ds_path_uri()`. Converts Tapis system +URIs back to their equivalent DesignSafe local paths accessible in a Jupyter +environment. + +**Args:** + +- `tapis_uri` (`str`): The Tapis URI to convert. Supported formats: + - `"tapis://designsafe.storage.default/username/path"` -> `"/home/jupyter/MyData/path"` + - `"tapis://designsafe.storage.community/path"` -> `"/home/jupyter/CommunityData/path"` + - `"tapis://project-*/path"` -> `"/home/jupyter/MyProjects/path"` + +**Returns:** `str` -- The corresponding DesignSafe local path, or the original URI if it is not a recognized Tapis URI format. + +**Raises:** + +- `ValueError`: If the Tapis URI format is invalid. 
+ +**Example:** + +```python +from dapi.files import tapis_uri_to_local_path + +local_path = tapis_uri_to_local_path( + "tapis://designsafe.storage.default/user/data/file.txt" +) +# "/home/jupyter/MyData/data/file.txt" + +local_path = tapis_uri_to_local_path( + "tapis://designsafe.storage.community/datasets/earthquake.csv" +) +# "/home/jupyter/CommunityData/datasets/earthquake.csv" + +# Using DSClient: +local_path = ds.files.to_path("tapis://designsafe.storage.default/user/data/file.txt") ``` +--- + ## File Operations -```{eval-rst} -.. autofunction:: dapi.files.upload_file +### `upload_file(t, local_path, remote_uri)` + +Upload a local file to a Tapis storage system. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `local_path` (`str`): Path to the local file to upload. +- `remote_uri` (`str`): Tapis URI destination (e.g., `"tapis://system/path/file.txt"`). + +**Raises:** + +- `FileNotFoundError`: If the local file does not exist. +- `ValueError`: If `local_path` is not a file or `remote_uri` is invalid. +- `FileOperationError`: If the Tapis upload operation fails. + +**Example:** + +```python +from dapi.files import upload_file + +upload_file(client, "/local/data.txt", "tapis://mysystem/uploads/data.txt") +# Uploading '/local/data.txt' to system 'mysystem' at path 'uploads/data.txt'... +# Upload complete. + +# Using DSClient: +ds.files.upload("/local/data.txt", "tapis://mysystem/uploads/data.txt") ``` -```{eval-rst} -.. autofunction:: dapi.files.download_file +--- + +### `download_file(t, remote_uri, local_path)` + +Download a file from a Tapis storage system to the local filesystem. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `remote_uri` (`str`): Tapis URI of the file to download (e.g., `"tapis://system/path/file.txt"`). +- `local_path` (`str`): Local filesystem path where the file should be saved. + +**Raises:** + +- `ValueError`: If `local_path` is a directory or `remote_uri` is invalid. 
+- `FileOperationError`: If the download operation fails or the remote file is not found. + +**Example:** + +```python +from dapi.files import download_file + +download_file(client, "tapis://mysystem/data/results.txt", "/local/results.txt") +# Downloading from system 'mysystem' path 'data/results.txt' to '/local/results.txt'... +# Download complete. + +# Using DSClient: +ds.files.download("tapis://mysystem/data/results.txt", "/local/results.txt") ``` -```{eval-rst} -.. autofunction:: dapi.files.list_files +--- + +### `list_files(t, remote_uri, limit=100, offset=0)` + +List files and directories in a Tapis storage system path. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `remote_uri` (`str`): Tapis URI of the directory to list (e.g., `"tapis://system/path/"`). +- `limit` (`int`, optional): Maximum number of items to return. Defaults to `100`. +- `offset` (`int`, optional): Number of items to skip (for pagination). Defaults to `0`. + +**Returns:** `List[Tapis]` -- List of file and directory objects from the specified path. Each object contains metadata like name, size, type, and permissions. + +**Raises:** + +- `ValueError`: If `remote_uri` is invalid. +- `FileOperationError`: If the listing operation fails or the path is not found. + +**Example:** + +```python +from dapi.files import list_files + +files = list_files(client, "tapis://mysystem/data/") +for f in files: + print(f"{f.name} ({f.type})") + +# Using DSClient: +files = ds.files.list("tapis://mysystem/data/") ``` diff --git a/docs/api/index.md b/docs/api/index.md index 836b893..2c8acbf 100644 --- a/docs/api/index.md +++ b/docs/api/index.md @@ -1,6 +1,6 @@ # API Reference -This section provides comprehensive API documentation for all DAPI modules and classes, automatically generated from docstrings. +This section provides API documentation for all DAPI modules and classes. For the full auto-generated reference with signatures and type annotations, see the [Sphinx API docs](/api/). 
## Overview diff --git a/docs/api/jobs.md b/docs/api/jobs.md index ffc7176..86ff8cf 100644 --- a/docs/api/jobs.md +++ b/docs/api/jobs.md @@ -4,63 +4,442 @@ Job submission, monitoring, and management functionality for DesignSafe computat ## Job Request Generation -```{eval-rst} -.. autofunction:: dapi.jobs.generate_job_request +### `generate_job_request(tapis_client, app_id, input_dir_uri, script_filename=None, app_version=None, job_name=None, description=None, tags=None, max_minutes=None, node_count=None, cores_per_node=None, memory_mb=None, queue=None, allocation=None, archive_system=None, archive_path=None, extra_file_inputs=None, extra_app_args=None, extra_env_vars=None, extra_scheduler_options=None, script_param_names=["Input Script", "Main Script", "tclScript"], input_dir_param_name="Input Directory", allocation_param_name="TACC Allocation")` + +Generate a Tapis job request dictionary based on app definition and inputs. + +Creates a properly formatted job request dictionary by retrieving the specified +application details and applying user-provided overrides and additional parameters. +The function automatically maps the script filename (if provided) and input +directory to the appropriate app parameters. It dynamically reads the app definition +to detect parameter names, determines whether to use appArgs or envVariables, and +automatically populates all required parameters with default values when available. + +**Args:** +- `tapis_client` (Tapis): Authenticated Tapis client instance. +- `app_id` (str): The ID of the Tapis application to use for the job. +- `input_dir_uri` (str): Tapis URI to the input directory containing job files. +- `script_filename` (str, optional): Name of the main script file to execute. If `None` (default), no script parameter is added. Suitable for apps like OpenFOAM that don't take a script argument. +- `app_version` (str, optional): Specific app version to use. If `None`, uses latest. +- `job_name` (str, optional): Custom job name. 
If `None`, auto-generates based on app ID and timestamp. +- `description` (str, optional): Job description. If `None`, uses app description. +- `tags` (List[str], optional): List of tags to associate with the job. +- `max_minutes` (int, optional): Maximum runtime in minutes. Overrides app default. +- `node_count` (int, optional): Number of compute nodes. Overrides app default. +- `cores_per_node` (int, optional): Cores per node. Overrides app default. +- `memory_mb` (int, optional): Memory in MB. Overrides app default. +- `queue` (str, optional): Execution queue name. Overrides app default. +- `allocation` (str, optional): TACC allocation to charge for compute time. +- `archive_system` (str, optional): Archive system for job outputs. If `"designsafe"` is specified, uses `"designsafe.storage.default"`. If `None`, uses app default. +- `archive_path` (str, optional): Archive directory path. Can be a full path or just a directory name in MyData. If `None` and `archive_system` is `"designsafe"`, defaults to `"${EffectiveUserId}/tapis-jobs-archive/${JobCreateDate}/${JobUUID}"`. +- `extra_file_inputs` (List[Dict[str, Any]], optional): Additional file inputs beyond the main input directory. +- `extra_app_args` (List[Dict[str, Any]], optional): Additional application arguments for parameters expected in `appArgs`. +- `extra_env_vars` (List[Dict[str, Any]], optional): Additional environment variables for parameters expected in `envVariables` (e.g., OpenFOAM solver, mesh). Each item should be `{"key": "VAR_NAME", "value": "var_value"}`. +- `extra_scheduler_options` (List[Dict[str, Any]], optional): Additional scheduler options. +- `script_param_names` (List[str], optional): Parameter names/keys to check for script placement. Defaults to `["Input Script", "Main Script", "tclScript"]`. +- `input_dir_param_name` (str, optional): The name of the fileInput in the app definition that corresponds to `input_dir_uri`. Defaults to `"Input Directory"`. 
Auto-detected from the app definition. +- `allocation_param_name` (str, optional): Parameter name for TACC allocation. Defaults to `"TACC Allocation"`. + +**Returns:** `Dict[str, Any]` -- Complete job request dictionary ready for submission to Tapis. + +**Raises:** +- `AppDiscoveryError`: If the specified app cannot be found or details cannot be retrieved. +- `ValueError`: If required parameters are missing or invalid, or if `script_filename` is provided but no suitable placement can be found. +- `JobSubmissionError`: If unexpected errors occur during job request generation. + +**Example:** + +```python +from dapi.jobs import generate_job_request + +job_request = generate_job_request( + tapis_client=client, + app_id="matlab-r2023a", + input_dir_uri="tapis://designsafe.storage.default/username/input/", + script_filename="run_analysis.m", + max_minutes=120, + allocation="MyProject-123", +) ``` ## Job Submission -```{eval-rst} -.. autofunction:: dapi.jobs.submit_job_request +### `submit_job_request(tapis_client, job_request)` + +Submit a pre-generated job request dictionary to Tapis. + +Takes a complete job request dictionary (typically generated by `generate_job_request`) +and submits it to the Tapis jobs service for execution. Prints the job request +details before submission for debugging purposes. + +**Args:** +- `tapis_client` (Tapis): Authenticated Tapis client instance. +- `job_request` (Dict[str, Any]): Complete job request dictionary containing all necessary job parameters, file inputs, and configuration. + +**Returns:** `SubmittedJob` -- A SubmittedJob object for monitoring and managing the submitted job. + +**Raises:** +- `ValueError`: If `job_request` is not a dictionary. +- `JobSubmissionError`: If the Tapis job submission fails, with additional context from the HTTP request and response when available. + +**Example:** + +```python +from dapi.jobs import generate_job_request, submit_job_request + +job_request = generate_job_request(...) 
+submitted_job = submit_job_request(client, job_request) +# Job submitted successfully. UUID: 12345678-1234-1234-1234-123456789abc ``` ## Job Monitoring -```{eval-rst} -.. autofunction:: dapi.jobs.get_job_status +### `get_job_status(t, job_uuid)` + +Get the current status of a job by UUID. + +Standalone convenience function that creates a temporary SubmittedJob instance +to retrieve the current status of an existing job. + +**Args:** +- `t` (Tapis): Authenticated Tapis client instance. +- `job_uuid` (str): The UUID of the job to check. + +**Returns:** `str` -- Current job status (e.g., `"QUEUED"`, `"RUNNING"`, `"FINISHED"`, `"FAILED"`). + +**Raises:** +- `JobMonitorError`: If status retrieval fails. +- `TypeError`: If `t` is not a Tapis instance. +- `ValueError`: If `job_uuid` is empty or invalid. + +**Example:** + +```python +from dapi.jobs import get_job_status + +status = get_job_status(client, "12345678-1234-1234-1234-123456789abc") +print(f"Job status: {status}") ``` -```{eval-rst} -.. autofunction:: dapi.jobs.get_runtime_summary +--- + +### `get_runtime_summary(t, job_uuid, verbose=False)` + +Print a runtime summary for a job by UUID. + +Standalone convenience function that creates a temporary SubmittedJob instance +to analyze and print the runtime summary of an existing job. + +**Args:** +- `t` (Tapis): Authenticated Tapis client instance. +- `job_uuid` (str): The UUID of the job to analyze. +- `verbose` (bool, optional): If `True`, prints detailed job history events in addition to the runtime summary. Defaults to `False`. + +**Raises:** +- `JobMonitorError`: If job details cannot be retrieved. +- `TypeError`: If `t` is not a Tapis instance. +- `ValueError`: If `job_uuid` is empty or invalid. 
+ +**Example:** + +```python +from dapi.jobs import get_runtime_summary + +get_runtime_summary(client, "12345678-1234-1234-1234-123456789abc") +# Runtime Summary +# --------------- +# QUEUED time: 00:05:30 +# RUNNING time: 01:23:45 +# TOTAL time: 01:29:15 +# --------------- ``` -```{eval-rst} -.. autofunction:: dapi.jobs.interpret_job_status +--- + +### `interpret_job_status(final_status, job_uuid=None)` + +Print a user-friendly interpretation of a job status. + +Provides human-readable explanations for various job status values, +including both standard Tapis states and special monitoring states. + +**Args:** +- `final_status` (str): The job status to interpret. Can be a standard Tapis status (`"FINISHED"`, `"FAILED"`, etc.) or a special monitoring status (`STATUS_TIMEOUT`, `STATUS_INTERRUPTED`, etc.). +- `job_uuid` (str, optional): The job UUID to include in the message for context. Defaults to `None`. + +**Example:** + +```python +from dapi.jobs import interpret_job_status + +interpret_job_status("FINISHED", "12345678-1234-1234-1234-123456789abc") +# Job 12345678-1234-1234-1234-123456789abc completed successfully. + +interpret_job_status("FAILED") +# Job failed. Check logs or job details. ``` ## Listing Jobs -```{eval-rst} -.. autofunction:: dapi.jobs.list_jobs +### `list_jobs(tapis_client, app_id=None, status=None, limit=100, output="df", verbose=False)` + +Fetch Tapis jobs with optional filtering. + +Retrieves jobs from Tapis ordered by creation date (newest first) +and optionally filters by app ID and/or status. Filters are applied +client-side after fetching. + +**Args:** +- `tapis_client` (Tapis): Authenticated Tapis client instance. +- `app_id` (str, optional): Filter by application ID (e.g., `"opensees-mp-s3"`). +- `status` (str, optional): Filter by job status (e.g., `"FINISHED"`, `"FAILED"`). Case-insensitive. +- `limit` (int, optional): Maximum number of jobs to fetch from Tapis. Defaults to `100`. +- `output` (str, optional): Output format. 
`"df"` returns a pandas DataFrame (default), `"list"` returns a list of dicts, `"raw"` returns the raw TapisResult objects. +- `verbose` (bool, optional): If `True`, prints the number of jobs found. + +**Returns:** Depends on `output`: +- `"df"`: pandas DataFrame with formatted datetime columns. +- `"list"`: list of dicts with job metadata. +- `"raw"`: list of TapisResult objects as returned by the API. + +**Raises:** +- `JobMonitorError`: If the Tapis API call fails. +- `ValueError`: If output format is not recognized. + +**Example:** + +```python +from dapi.jobs import list_jobs + +df = list_jobs(client, app_id="matlab-r2023a", status="FINISHED") +jobs = list_jobs(client, output="list") +raw = list_jobs(client, limit=10, output="raw") ``` ## SubmittedJob Class -```{eval-rst} -.. autoclass:: dapi.jobs.SubmittedJob - :members: - :undoc-members: - :show-inheritance: +### `SubmittedJob(tapis_client, job_uuid)` + +Represents a submitted Tapis job with methods for monitoring and management. + +This class provides a high-level interface for interacting with Tapis jobs, +including status monitoring, output retrieval, job cancellation, and runtime +analysis. It caches job details and status to minimize API calls. + +**Args:** +- `tapis_client` (Tapis): Authenticated Tapis client instance. +- `job_uuid` (str): The UUID of an existing Tapis job. + +**Raises:** +- `TypeError`: If `tapis_client` is not a Tapis instance. +- `ValueError`: If `job_uuid` is empty or not a string. + +**Example:** + +```python +from dapi.jobs import SubmittedJob + +job = SubmittedJob(client, "12345678-1234-1234-1234-123456789abc") +status = job.status +if status in job.TERMINAL_STATES: + print("Job completed") ``` -## Status Constants +### Properties + +#### `uuid` + +`str` -- The unique identifier of the Tapis job. Set at initialization. + +--- + +#### `status` + +`str` -- The current job status, using cached value when appropriate. 
For terminal states, returns the cached status without making an API call. For non-terminal states, may fetch fresh status. Returns `STATUS_UNKNOWN` if status cannot be determined. + +--- -```{eval-rst} -.. autodata:: dapi.jobs.STATUS_TIMEOUT +#### `details` + +`Tapis` -- Complete job details object containing all job metadata, configuration, and current state information. Fetches from Tapis if not already cached. + +--- + +#### `last_message` + +`str` or `None` -- The last status message recorded for the job. Contains information about the current job state or errors. Returns `None` if not available or if retrieval fails. + +--- + +#### `archive_uri` + +`str` or `None` -- Tapis URI of the job's archive directory (e.g., `"tapis://designsafe.storage.default/user/tapis-jobs-archive/..."`). Returns `None` if archive information is not set. + +### Methods + +#### `get_status(force_refresh=True)` + +Get the current job status from Tapis API. + +**Args:** +- `force_refresh` (bool, optional): If `True`, always makes a fresh API call. If `False`, may return cached status. Defaults to `True`. + +**Returns:** `str` -- Current job status from Tapis API. + +**Raises:** +- `JobMonitorError`: If status cannot be retrieved from Tapis. + +--- + +#### `monitor(interval=15, timeout_minutes=None)` + +Monitor job status with progress bars until completion or timeout. + +Continuously monitors the job status using tqdm progress bars to show +progress through different job phases (waiting, running). Handles +interruptions and errors gracefully. + +**Args:** +- `interval` (int, optional): Status check interval in seconds. Defaults to `15`. +- `timeout_minutes` (int, optional): Maximum monitoring time in minutes. If `None`, uses the job's `maxMinutes` from its configuration. Use `-1` or `0` for unlimited monitoring. Defaults to `None`. + +**Returns:** `str` -- Final job status. Can be a standard Tapis status (`"FINISHED"`, `"FAILED"`, etc.) 
or a special monitoring status: +- `STATUS_TIMEOUT`: Monitoring timed out +- `STATUS_INTERRUPTED`: User interrupted monitoring (Ctrl+C) +- `STATUS_MONITOR_ERROR`: Error occurred during monitoring + +**Example:** + +```python +job = SubmittedJob(client, job_uuid) +final_status = job.monitor(interval=30, timeout_minutes=120) +if final_status == "FINISHED": + print("Job completed successfully!") ``` -```{eval-rst} -.. autodata:: dapi.jobs.STATUS_INTERRUPTED +--- + +#### `print_runtime_summary(verbose=False)` + +Print a summary of job runtime phases and total execution time. + +Analyzes the job's execution history to show time spent in different +phases (queued, running) and calculates the total runtime. + +**Args:** +- `verbose` (bool, optional): If `True`, prints detailed job history events in addition to the runtime summary. Defaults to `False`. + +**Example:** + +```python +job.print_runtime_summary() +# Runtime Summary +# --------------- +# QUEUED time: 00:05:30 +# RUNNING time: 01:23:45 +# TOTAL time: 01:29:15 +# --------------- ``` -```{eval-rst} -.. autodata:: dapi.jobs.STATUS_MONITOR_ERROR +--- + +#### `cancel()` + +Attempt to cancel the job execution. Jobs that are already in terminal states cannot be cancelled. + +**Raises:** +- `JobMonitorError`: If the cancellation request fails. + +**Example:** + +```python +job.cancel() +# Cancel request sent for job 12345678-... Status may take time to update. ``` -```{eval-rst} -.. autodata:: dapi.jobs.STATUS_UNKNOWN +--- + +#### `list_outputs(path="/", limit=100, offset=0)` + +List files and directories in the job's archive directory. + +**Args:** +- `path` (str, optional): Relative path within the job archive to list. Defaults to `"/"` (archive root). +- `limit` (int, optional): Maximum number of items to return. Defaults to `100`. +- `offset` (int, optional): Number of items to skip for pagination. Defaults to `0`. + +**Returns:** `List[Tapis]` -- List of file and directory objects in the specified path. 
+ +**Raises:** +- `FileOperationError`: If archive information is not available or listing fails. + +**Example:** + +```python +outputs = job.list_outputs() +for item in outputs: + print(f"{item.name} ({item.type})") + +results = job.list_outputs(path="results/") +``` + +--- + +#### `get_output_content(output_filename, max_lines=None, missing_ok=True)` + +Retrieve the content of a specific output file from the job's archive. + +**Args:** +- `output_filename` (str): Name of the file in the job's archive root (e.g., `"tapisjob.out"`, `"tapisjob.err"`). +- `max_lines` (int, optional): If specified, returns only the last N lines of the file. Defaults to `None` (full file). +- `missing_ok` (bool, optional): If `True` and the file is not found, returns `None`. If `False`, raises `FileOperationError`. Defaults to `True`. + +**Returns:** `str` or `None` -- Content of the file as a string, or `None` if the file is not found and `missing_ok=True`. + +**Raises:** +- `FileOperationError`: If the archive is not available, the file is not found (and `missing_ok=False`), or fetching fails. + +**Example:** + +```python +# Get job output log +output = job.get_output_content("tapisjob.out") + +# Get last 50 lines of error log +errors = job.get_output_content("tapisjob.err", max_lines=50) + +# Require file to exist +results = job.get_output_content("results.txt", missing_ok=False) ``` -```{eval-rst} -.. autodata:: dapi.jobs.TAPIS_TERMINAL_STATES +--- + +#### `download_output(remote_path, local_target)` + +Download a specific file from the job's archive directory. + +**Args:** +- `remote_path` (str): Relative path to the file within the job archive. +- `local_target` (str): Local filesystem path where the file should be saved. + +**Raises:** +- `FileOperationError`: If archive information is not available or download fails. 
+ +**Example:** + +```python +job.download_output("tapisjob.out", "/local/job_output.txt") +job.download_output("results/data.txt", "/local/results/data.txt") ``` + +## Status Constants + +Module-level constants used by the monitoring system: + +| Constant | Value | Description | +|---|---|---| +| `STATUS_TIMEOUT` | `"TIMEOUT"` | Monitoring timed out before the job reached a terminal state. | +| `STATUS_INTERRUPTED` | `"INTERRUPTED"` | User interrupted monitoring (e.g., Ctrl+C). | +| `STATUS_MONITOR_ERROR` | `"MONITOR_ERROR"` | An error occurred during the monitoring loop. | +| `STATUS_UNKNOWN` | `"UNKNOWN"` | Job status could not be determined. | +| `TAPIS_TERMINAL_STATES` | `["FINISHED", "FAILED", "CANCELLED", "STOPPED", "ARCHIVING_FAILED"]` | Standard Tapis states indicating a job has completed (successfully or not). | diff --git a/docs/api/launcher.md b/docs/api/launcher.md index 0de21d5..a99446d 100644 --- a/docs/api/launcher.md +++ b/docs/api/launcher.md @@ -4,14 +4,134 @@ PyLauncher parameter sweep utilities for generating task lists and launcher scri ## Generate Sweep -```{eval-rst} -.. autofunction:: dapi.launcher.generate_sweep +### `generate_sweep(command, sweep, directory=None, *, placeholder_style="token", debug=None, preview=False)` + +Generate sweep commands and write PyLauncher input files. + +When `preview` is `True`, returns a DataFrame of all parameter combinations without writing any files -- useful for inspecting the sweep in a notebook before committing. + +When `preview` is `False` (default), expands `command` into one command per parameter combination and writes `runsList.txt` and `call_pylauncher.py` into `directory`. + +**Args:** +- `command` (str): Command template containing placeholders that match keys in `sweep`. Environment variables like `$WORK` or `$SLURM_JOB_ID` are left untouched. +- `sweep` (Mapping[str, Sequence[Any]]): Mapping of placeholder name to a sequence of values. Example: `{"ALPHA": [0.3, 0.5], "BETA": [1, 2]}`. 
+- `directory` (str | Path, optional): Directory to write files into. Created if it does not exist. Required when `preview` is `False`. +- `placeholder_style` (str, optional): How placeholders appear in `command`: + - `"token"` (default): bare tokens, e.g. `ALPHA` + - `"braces"`: brace-wrapped, e.g. `{ALPHA}` +- `debug` (str, optional): Optional debug string passed to `ClassicLauncher` (e.g., `"host+job"`). Ignored when `preview` is `True`. +- `preview` (bool, optional): If `True`, return a DataFrame of parameter combinations without writing files. Defaults to `False`. + +**Returns:** `List[str]` of generated commands when `preview` is `False`, or a `pandas.DataFrame` of parameter combinations when `preview` is `True`. + +**Raises:** +- `TypeError`: If a sweep value is not a non-string sequence. +- `ValueError`: If a sweep value is empty, `placeholder_style` is invalid, or `directory` is missing when `preview` is `False`. + +**Example:** + +```python +from dapi.launcher import generate_sweep + +# Preview parameter combinations +df = generate_sweep( + command="python run.py --alpha ALPHA --beta BETA", + sweep={"ALPHA": [0.1, 0.5, 1.0], "BETA": [1, 2]}, + preview=True, +) +print(df) +# ALPHA BETA +# 0 0.1 1 +# 1 0.1 2 +# 2 0.5 1 +# 3 0.5 2 +# 4 1.0 1 +# 5 1.0 2 + +# Generate files for PyLauncher +commands = generate_sweep( + command="python run.py --alpha ALPHA --beta BETA", + sweep={"ALPHA": [0.1, 0.5, 1.0], "BETA": [1, 2]}, + directory="/home/jupyter/MyData/sweep/", +) +# Writes runsList.txt and call_pylauncher.py to the directory ``` ## Client Interface -```{eval-rst} -.. autoclass:: dapi.client.ParametricSweepMethods - :members: - :undoc-members: +The `ParametricSweepMethods` class is accessible via `ds.jobs.parametric_sweep` on a `DSClient` instance. It wraps `generate_sweep` and adds a `submit` method that handles Tapis URI translation and job submission. 
+ +### `ParametricSweepMethods.generate(command, sweep, directory=None, *, placeholder_style="token", debug=None, preview=False)` + +Generate PyLauncher sweep files or preview the parameter grid. This is a convenience wrapper around `generate_sweep()`. + +**Args:** +- `command` (str): Command template with placeholders matching sweep keys. +- `sweep` (Dict[str, Any]): Mapping of placeholder name to sequence of values. +- `directory` (str, optional): Directory to write files into (created if needed). Required when `preview` is `False`. +- `placeholder_style` (str, optional): `"token"` (default) for bare `ALPHA`, or `"braces"` for `{ALPHA}`. +- `debug` (str, optional): Optional debug string (e.g., `"host+job"`). +- `preview` (bool, optional): If `True`, return a DataFrame (dry run). + +**Returns:** `List[str]` of commands, or `pandas.DataFrame` when `preview` is `True`. + +**Example:** + +```python +ds = DSClient() + +# Preview +df = ds.jobs.parametric_sweep.generate( + command="python run.py --alpha ALPHA", + sweep={"ALPHA": [0.1, 0.5, 1.0]}, + preview=True, +) + +# Write files +commands = ds.jobs.parametric_sweep.generate( + command="python run.py --alpha ALPHA", + sweep={"ALPHA": [0.1, 0.5, 1.0]}, + directory="/home/jupyter/MyData/sweep/", +) +``` + +--- + +### `ParametricSweepMethods.submit(directory, app_id, allocation, *, node_count=None, cores_per_node=None, max_minutes=None, queue=None, **kwargs)` + +Submit a PyLauncher sweep job. Translates `directory` to a Tapis URI, builds a job request with `call_pylauncher.py` as the script, and submits it. + +**Args:** +- `directory` (str): Path to the input directory containing `runsList.txt` and `call_pylauncher.py` (e.g., `"/MyData/sweep/"`). +- `app_id` (str): Tapis application ID (e.g., `"openseespy-s3"`). +- `allocation` (str): TACC allocation to charge. +- `node_count` (int, optional): Number of compute nodes. +- `cores_per_node` (int, optional): Cores per node. 
+- `max_minutes` (int, optional): Maximum runtime in minutes. +- `queue` (str, optional): Execution queue name. +- `**kwargs`: Additional arguments passed to `ds.jobs.generate()`. + +**Returns:** `SubmittedJob` -- A job object for monitoring via `.monitor()`. + +**Example:** + +```python +ds = DSClient() + +# Generate sweep files first +ds.jobs.parametric_sweep.generate( + command="python run.py --alpha ALPHA --beta BETA", + sweep={"ALPHA": [0.1, 0.5], "BETA": [1, 2]}, + directory="/home/jupyter/MyData/sweep/", +) + +# Submit the sweep job +job = ds.jobs.parametric_sweep.submit( + directory="/MyData/sweep/", + app_id="openseespy-s3", + allocation="MyProject-123", + node_count=2, + max_minutes=60, +) +job.monitor() ``` diff --git a/docs/api/systems.md b/docs/api/systems.md index ea8b523..15733d2 100644 --- a/docs/api/systems.md +++ b/docs/api/systems.md @@ -2,30 +2,189 @@ System information, queue management, and TMS credential management for DesignSafe execution systems. +All functions below accept an authenticated Tapis client as the first argument. +When using the `DSClient`, the Tapis client is supplied automatically and the +methods are available under `ds.systems`. + +| Module function | Client shorthand | +|---|---| +| `list_system_queues(t, ...)` | `ds.systems.queues(...)` | +| `check_credentials(t, ...)` | `ds.systems.check_credentials(...)` | +| `establish_credentials(t, ...)` | `ds.systems.establish_credentials(...)` | +| `revoke_credentials(t, ...)` | `ds.systems.revoke_credentials(...)` | +| `setup_tms_credentials(t, ...)` | *(called automatically during `DSClient` init)* | + +--- + ## System Queues -```{eval-rst} -.. autofunction:: dapi.systems.list_system_queues +### `list_system_queues(t, system_id, verbose=True)` + +Retrieve the list of batch logical queues available on a specific Tapis execution system. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. 
+- `system_id` (`str`): The ID of the execution system (e.g., `"frontera"`, `"stampede3"`). +- `verbose` (`bool`, optional): If `True`, prints the found queues with details. Defaults to `True`. + +**Returns:** `List[Any]` -- A list of queue objects (typically `TapisResult` instances) defined for the system. Returns an empty list if the system exists but has no queues defined. + +**Raises:** + +- `SystemInfoError`: If the system is not found or an API error occurs. +- `ValueError`: If `system_id` is empty. + +**Example:** + +```python +from dapi.systems import list_system_queues + +queues = list_system_queues(client, "frontera") +# Fetching queue information for system 'frontera'... +# Found 3 batch logical queues for system 'frontera': +# - Name: normal (HPC Queue: normal, Max Jobs: 50, ...) +# - Name: development (HPC Queue: development, Max Jobs: 1, ...) + +# Using DSClient: +queues = ds.systems.queues("frontera") ``` +--- + ## TMS Credential Management Manage Tapis Managed Secrets (TMS) credentials on execution systems. TMS credentials are SSH key pairs that allow Tapis to access TACC systems (Frontera, Stampede3, Lonestar6) on behalf of a user. They must be established once per system before submitting jobs. -### Check Credentials +### `check_credentials(t, system_id, username=None)` + +Check whether TMS credentials exist for a user on a Tapis system. -```{eval-rst} -.. autofunction:: dapi.systems.check_credentials +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `system_id` (`str`): The ID of the Tapis system (e.g., `"frontera"`, `"stampede3"`). +- `username` (`Optional[str]`, optional): The username to check. If `None`, auto-detected from `t.username`. Defaults to `None`. + +**Returns:** `bool` -- `True` if credentials exist, `False` if they do not. + +**Raises:** + +- `ValueError`: If `system_id` is empty or username cannot be determined. +- `CredentialError`: If an unexpected API error occurs during the check. 
+ +**Example:** + +```python +from dapi.systems import check_credentials + +has_creds = check_credentials(client, "frontera") +print(has_creds) # True or False + +# Using DSClient: +has_creds = ds.systems.check_credentials("frontera") ``` -### Establish Credentials +--- + +### `establish_credentials(t, system_id, username=None, force=False, verbose=True)` + +Establish TMS credentials for a user on a Tapis system. + +Idempotent: if credentials already exist and `force` is `False`, no action is taken. +Only systems with `defaultAuthnMethod` set to `"TMS_KEYS"` are supported. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `system_id` (`str`): The ID of the Tapis system (e.g., `"frontera"`, `"stampede3"`). +- `username` (`Optional[str]`, optional): The username. If `None`, auto-detected from `t.username`. Defaults to `None`. +- `force` (`bool`, optional): If `True`, create credentials even if they already exist. Defaults to `False`. +- `verbose` (`bool`, optional): If `True`, prints status messages. Defaults to `True`. + +**Raises:** -```{eval-rst} -.. autofunction:: dapi.systems.establish_credentials +- `ValueError`: If `system_id` is empty or username cannot be determined. +- `CredentialError`: If the system does not use `TMS_KEYS`, if the system is not found, or if credential creation fails. + +**Example:** + +```python +from dapi.systems import establish_credentials + +establish_credentials(client, "frontera") +# TMS credentials established for user 'myuser' on system 'frontera'. + +# Force re-creation: +establish_credentials(client, "frontera", force=True) + +# Using DSClient: +ds.systems.establish_credentials("frontera") +``` + +--- + +### `revoke_credentials(t, system_id, username=None, verbose=True)` + +Remove TMS credentials for a user on a Tapis system. + +Idempotent: if credentials do not exist, no error is raised. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. 
+- `system_id` (`str`): The ID of the Tapis system (e.g., `"frontera"`, `"stampede3"`). +- `username` (`Optional[str]`, optional): The username. If `None`, auto-detected from `t.username`. Defaults to `None`. +- `verbose` (`bool`, optional): If `True`, prints status messages. Defaults to `True`. + +**Raises:** + +- `ValueError`: If `system_id` is empty or username cannot be determined. +- `CredentialError`: If credential removal fails unexpectedly. + +**Example:** + +```python +from dapi.systems import revoke_credentials + +revoke_credentials(client, "frontera") +# Credentials revoked for user 'myuser' on system 'frontera'. + +# Using DSClient: +ds.systems.revoke_credentials("frontera") ``` -### Revoke Credentials +--- + +### `setup_tms_credentials(t, systems=None)` + +Check and establish TMS credentials on execution systems. + +For each system, checks if credentials exist and creates them if missing. +Failures are handled gracefully -- a system that cannot be reached or where +the user lacks an allocation is skipped with a warning. + +This function is called automatically during `DSClient` initialization for the +default TACC systems. + +**Args:** + +- `t` (`Tapis`): Authenticated Tapis client instance. +- `systems` (`Optional[List[str]]`, optional): List of system IDs to set up. Defaults to `TACC_SYSTEMS` (`["frontera", "stampede3", "ls6"]`). + +**Returns:** `Dict[str, str]` -- A dictionary mapping each `system_id` to its status: `"ready"` (credentials already existed), `"created"` (newly established), or `"skipped"` (system unreachable or not TMS_KEYS). + +**Example:** + +```python +from dapi.systems import setup_tms_credentials + +results = setup_tms_credentials(client) +# TMS credentials ready: frontera, stampede3 (newly created: stampede3) +# TMS credentials skipped: ls6 +print(results) +# {'frontera': 'ready', 'stampede3': 'created', 'ls6': 'skipped'} -```{eval-rst} -.. 
autofunction:: dapi.systems.revoke_credentials +# With custom system list: +results = setup_tms_credentials(client, systems=["frontera"]) ``` diff --git a/docs/installation.md b/docs/installation.md index 99312b7..721f403 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -105,7 +105,7 @@ print(dir(dapi)) Expected output: ``` -dapi version: 1.1.0 +dapi version: 0.4.9 Available functions: ['DSClient', 'SubmittedJob', 'interpret_job_status', ...] ``` diff --git a/myst.yml b/myst.yml index 1fd623c..37ec574 100644 --- a/myst.yml +++ b/myst.yml @@ -49,6 +49,7 @@ project: - out/** - "**.ipynb_checkpoints" - examples/** + - docs-api/** - README.md - AUTHORS.md - LICENSE.md diff --git a/pyproject.toml b/pyproject.toml index f32cf1d..ad4dcdf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,9 @@ dev = [ ] docs = [ "jupyter-book>=2.0.0", + "sphinx>=7.0", + "sphinx-autodoc-typehints>=2.0", + "furo", ] [build-system] From ca65df728370d616b314254cfd3f2f03acc375aa Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 21:13:52 -0500 Subject: [PATCH 13/21] fix formatting --- dapi/client.py | 12 ++++--- dapi/jobs.py | 12 ++++--- dapi/launcher.py | 9 ++---- dapi/systems.py | 20 +++--------- examples/mpm/mpm.ipynb | 26 ++++++++++++++- examples/pylauncher_sweep.ipynb | 10 +++--- tests/jobs/test_list_jobs.py | 49 ++++++++++++++++++++++------- tests/jobs/test_parametric_sweep.py | 20 +++++++++--- tests/systems/test_credentials.py | 8 ++--- 9 files changed, 108 insertions(+), 58 deletions(-) diff --git a/dapi/client.py b/dapi/client.py index adf760c..64056f3 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -287,9 +287,7 @@ def queues(self, system_id: str, verbose: bool = True) -> List[Any]: self._tapis, system_id, verbose=verbose ) - def check_credentials( - self, system_id: str, username: str = None - ) -> bool: + def check_credentials(self, system_id: str, username: str = None) -> bool: """Check whether TMS credentials exist for a user on a 
system. Args: @@ -407,8 +405,12 @@ def generate( *preview* is ``True``. """ return launcher_module.generate_sweep( - command, sweep, directory, - placeholder_style=placeholder_style, debug=debug, preview=preview, + command, + sweep, + directory, + placeholder_style=placeholder_style, + debug=debug, + preview=preview, ) def submit( diff --git a/dapi/jobs.py b/dapi/jobs.py index 1064790..a5e7ed0 100644 --- a/dapi/jobs.py +++ b/dapi/jobs.py @@ -1412,8 +1412,7 @@ def list_jobs( results = [j for j in results if getattr(j, "appId", None) == app_id] if status: results = [ - j for j in results - if getattr(j, "status", "").upper() == status.upper() + j for j in results if getattr(j, "status", "").upper() == status.upper() ] if verbose: print(f"Found {len(results)} jobs.") @@ -1451,8 +1450,13 @@ def list_jobs( # Reorder: priority columns first priority = [ - "name", "uuid", "status", "appId", "appVersion", - "created_dt", "ended_dt", + "name", + "uuid", + "status", + "appId", + "appVersion", + "created_dt", + "ended_dt", ] priority_present = [c for c in priority if c in df.columns] remaining = [c for c in df.columns if c not in priority_present] diff --git a/dapi/launcher.py b/dapi/launcher.py index addc989..caf9a10 100644 --- a/dapi/launcher.py +++ b/dapi/launcher.py @@ -119,9 +119,7 @@ def generate_sweep( dirpath.mkdir(parents=True, exist_ok=True) # Write runsList.txt - (dirpath / "runsList.txt").write_text( - "\n".join(commands) + "\n", encoding="utf-8" - ) + (dirpath / "runsList.txt").write_text("\n".join(commands) + "\n", encoding="utf-8") # Write call_pylauncher.py if debug is not None: @@ -130,10 +128,7 @@ def generate_sweep( f'pylauncher.ClassicLauncher("runsList.txt", debug="{debug}")\n' ) else: - script = ( - "import pylauncher\n" - 'pylauncher.ClassicLauncher("runsList.txt")\n' - ) + script = "import pylauncher\n" 'pylauncher.ClassicLauncher("runsList.txt")\n' (dirpath / "call_pylauncher.py").write_text(script, encoding="utf-8") return commands diff --git 
a/dapi/systems.py b/dapi/systems.py index fcc757f..295d4e4 100644 --- a/dapi/systems.py +++ b/dapi/systems.py @@ -118,9 +118,7 @@ def _resolve_username(t: Tapis, username: Optional[str] = None) -> str: return effective -def check_credentials( - t: Tapis, system_id: str, username: Optional[str] = None -) -> bool: +def check_credentials(t: Tapis, system_id: str, username: Optional[str] = None) -> bool: """Check whether TMS credentials exist for a user on a Tapis system. Args: @@ -141,9 +139,7 @@ def check_credentials( effective_username = _resolve_username(t, username) try: - t.systems.checkUserCredential( - systemId=system_id, userName=effective_username - ) + t.systems.checkUserCredential(systemId=system_id, userName=effective_username) return True except (UnauthorizedError, NotFoundError): return False @@ -194,12 +190,8 @@ def establish_credentials( authn_method = getattr(system_details, "defaultAuthnMethod", None) except BaseTapyException as e: if hasattr(e, "response") and e.response and e.response.status_code == 404: - raise CredentialError( - f"System '{system_id}' not found." 
- ) from e - raise CredentialError( - f"Failed to retrieve system '{system_id}': {e}" - ) from e + raise CredentialError(f"System '{system_id}' not found.") from e + raise CredentialError(f"Failed to retrieve system '{system_id}': {e}") from e if authn_method != "TMS_KEYS": raise CredentialError( @@ -268,9 +260,7 @@ def revoke_credentials( effective_username = _resolve_username(t, username) try: - t.systems.removeUserCredential( - systemId=system_id, userName=effective_username - ) + t.systems.removeUserCredential(systemId=system_id, userName=effective_username) if verbose: print( f"Credentials revoked for user '{effective_username}' " diff --git a/examples/mpm/mpm.ipynb b/examples/mpm/mpm.ipynb index 08cd0c2..3808838 100644 --- a/examples/mpm/mpm.ipynb +++ b/examples/mpm/mpm.ipynb @@ -825,7 +825,31 @@ "id": "e074a3c3", "metadata": {}, "outputs": [], - "source": "# --- Translate Path with Verification ---\nds_path: str = \"/MyData/mpm-benchmarks/2d/uniaxial_stress/\"\nds_path_nonexistent: str = \"/MyData/this/path/does/not/exist/\"\n\ntry:\n # Translate and verify the existing path\n print(f\"\\nTranslating and verifying path: {ds_path}\")\n input_uri = ds.files.to_uri(ds_path, verify_exists=True)\n print(f\"Input Directory Tapis URI (verified): {input_uri}\")\n\n # Example: Try translating a non-existent path with verification (will raise error)\n print(f\"\\nTranslating and verifying non-existent path: {ds_path_nonexistent}\")\n input_uri_bad = ds.files.to_uri(\n ds_path_nonexistent, verify_exists=True\n )\n print(f\"This line should not be reached.\")\n\nexcept FileOperationError as e:\n print(f\"Error during path translation/verification: {e}\")\n # Decide how to handle the error (e.g., stop notebook, use default, etc.)\n # For this example, we'll stop if verification fails.\n raise SystemExit(\"Stopping notebook due to path verification error.\")\nexcept Exception as e:\n print(f\"An unexpected error occurred during path translation: {e}\")\n raise 
SystemExit(\"Stopping notebook due to unexpected path translation error.\")" + "source": [ + "# --- Translate Path with Verification ---\n", + "ds_path: str = \"/MyData/mpm-benchmarks/2d/uniaxial_stress/\"\n", + "ds_path_nonexistent: str = \"/MyData/this/path/does/not/exist/\"\n", + "\n", + "try:\n", + " # Translate and verify the existing path\n", + " print(f\"\\nTranslating and verifying path: {ds_path}\")\n", + " input_uri = ds.files.to_uri(ds_path, verify_exists=True)\n", + " print(f\"Input Directory Tapis URI (verified): {input_uri}\")\n", + "\n", + " # Example: Try translating a non-existent path with verification (will raise error)\n", + " print(f\"\\nTranslating and verifying non-existent path: {ds_path_nonexistent}\")\n", + " input_uri_bad = ds.files.to_uri(ds_path_nonexistent, verify_exists=True)\n", + " print(f\"This line should not be reached.\")\n", + "\n", + "except FileOperationError as e:\n", + " print(f\"Error during path translation/verification: {e}\")\n", + " # Decide how to handle the error (e.g., stop notebook, use default, etc.)\n", + " # For this example, we'll stop if verification fails.\n", + " raise SystemExit(\"Stopping notebook due to path verification error.\")\n", + "except Exception as e:\n", + " print(f\"An unexpected error occurred during path translation: {e}\")\n", + " raise SystemExit(\"Stopping notebook due to unexpected path translation error.\")" + ] } ], "metadata": { diff --git a/examples/pylauncher_sweep.ipynb b/examples/pylauncher_sweep.ipynb index c57b861..f51b384 100644 --- a/examples/pylauncher_sweep.ipynb +++ b/examples/pylauncher_sweep.ipynb @@ -112,7 +112,7 @@ "source": [ "sweep = {\n", " \"ALPHA\": [0.3, 0.5, 3.7],\n", - " \"BETA\": [1.1, 2.0, 3.0],\n", + " \"BETA\": [1.1, 2.0, 3.0],\n", "}" ] }, @@ -130,7 +130,7 @@ "outputs": [], "source": [ "ds.jobs.parametric_sweep.generate(\n", - " 'python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA',\n", + " \"python3 simulate.py --alpha ALPHA --beta BETA 
--output out_ALPHA_BETA\",\n", " sweep,\n", " preview=True,\n", ")" @@ -150,7 +150,7 @@ "outputs": [], "source": [ "commands = ds.jobs.parametric_sweep.generate(\n", - " 'python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA',\n", + " \"python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA\",\n", " sweep,\n", " input_dir_generic,\n", " debug=\"host+job\",\n", @@ -245,7 +245,7 @@ "input_dir_opensees = os.path.expanduser(\"~/MyData/opensees_sweep/\")\n", "os.makedirs(input_dir_opensees, exist_ok=True)\n", "\n", - "cantilever_script = '''\\\n", + "cantilever_script = \"\"\"\\\n", "# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n", "# Adapted from Silvia Mazzoni & Frank McKenna, 2006/2020\n", "# Units: kip, inch, second\n", @@ -317,7 +317,7 @@ "ops.analyze(1000)\n", "\n", "print(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n", - "'''\n", + "\"\"\"\n", "\n", "with open(os.path.join(input_dir_opensees, \"cantilever.py\"), \"w\") as f:\n", " f.write(cantilever_script)\n", diff --git a/tests/jobs/test_list_jobs.py b/tests/jobs/test_list_jobs.py index 032a993..84e8fc2 100644 --- a/tests/jobs/test_list_jobs.py +++ b/tests/jobs/test_list_jobs.py @@ -8,11 +8,17 @@ from dapi.exceptions import JobMonitorError -def _make_job(uuid, name, status, app_id, app_version="1.0", - created="2025-06-15T10:00:00.000Z", - ended="2025-06-15T11:00:00.000Z", - remote_started="2025-06-15T10:05:00.000Z", - last_updated="2025-06-15T11:00:00.000Z"): +def _make_job( + uuid, + name, + status, + app_id, + app_version="1.0", + created="2025-06-15T10:00:00.000Z", + ended="2025-06-15T11:00:00.000Z", + remote_started="2025-06-15T10:05:00.000Z", + last_updated="2025-06-15T11:00:00.000Z", +): """Create a mock TapisResult job object.""" job = Mock() job.__dict__ = { @@ -37,8 +43,14 @@ def _make_job(uuid, name, status, app_id, app_version="1.0", _make_job("uuid-001", "matlab-run-1", "FINISHED", "matlab-r2023a"), _make_job("uuid-002", "opensees-run-1", "FINISHED", 
"opensees-mp-s3"), _make_job("uuid-003", "matlab-run-2", "FAILED", "matlab-r2023a"), - _make_job("uuid-004", "mpm-run-1", "RUNNING", "mpm-s3", - ended=None, remote_started="2025-06-15T10:10:00.000Z"), + _make_job( + "uuid-004", + "mpm-run-1", + "RUNNING", + "mpm-s3", + ended=None, + remote_started="2025-06-15T10:10:00.000Z", + ), ] @@ -79,8 +91,14 @@ def test_combined_filters(self): def test_datetime_columns_exist(self): df = list_jobs(self.t) - for col in ["created_dt", "created_date", "ended_dt", "ended_date", - "remoteStarted_dt", "lastUpdated_dt"]: + for col in [ + "created_dt", + "created_date", + "ended_dt", + "ended_date", + "remoteStarted_dt", + "lastUpdated_dt", + ]: self.assertIn(col, df.columns) def test_datetime_nat_for_missing(self): @@ -91,9 +109,16 @@ def test_datetime_nat_for_missing(self): def test_priority_column_order(self): df = list_jobs(self.t) - expected_first = ["name", "uuid", "status", "appId", "appVersion", - "created_dt", "ended_dt"] - actual_first = list(df.columns[:len(expected_first)]) + expected_first = [ + "name", + "uuid", + "status", + "appId", + "appVersion", + "created_dt", + "ended_dt", + ] + actual_first = list(df.columns[: len(expected_first)]) self.assertEqual(actual_first, expected_first) def test_passes_limit_to_api(self): diff --git a/tests/jobs/test_parametric_sweep.py b/tests/jobs/test_parametric_sweep.py index a6d8b58..f710985 100644 --- a/tests/jobs/test_parametric_sweep.py +++ b/tests/jobs/test_parametric_sweep.py @@ -17,14 +17,18 @@ def test_empty_sweep_returns_base_command(self): def test_single_param(self): with tempfile.TemporaryDirectory() as d: - cmds = generate_sweep("python run.py --alpha ALPHA", {"ALPHA": [1, 2, 3]}, d) + cmds = generate_sweep( + "python run.py --alpha ALPHA", {"ALPHA": [1, 2, 3]}, d + ) self.assertEqual(len(cmds), 3) self.assertEqual(cmds[0], "python run.py --alpha 1") self.assertEqual(cmds[2], "python run.py --alpha 3") def test_multi_param_cartesian_product(self): with 
tempfile.TemporaryDirectory() as d: - cmds = generate_sweep("python run.py --a A --b B", {"A": [1, 2], "B": [10, 20]}, d) + cmds = generate_sweep( + "python run.py --a A --b B", {"A": [1, 2], "B": [10, 20]}, d + ) self.assertEqual(len(cmds), 4) self.assertIn("python run.py --a 1 --b 10", cmds) self.assertIn("python run.py --a 2 --b 20", cmds) @@ -36,17 +40,23 @@ def test_deterministic_order(self): def test_token_placeholder(self): with tempfile.TemporaryDirectory() as d: - cmds = generate_sweep("echo ALPHA", {"ALPHA": [1]}, d, placeholder_style="token") + cmds = generate_sweep( + "echo ALPHA", {"ALPHA": [1]}, d, placeholder_style="token" + ) self.assertEqual(cmds, ["echo 1"]) def test_braces_placeholder(self): with tempfile.TemporaryDirectory() as d: - cmds = generate_sweep("echo {ALPHA}", {"ALPHA": [1]}, d, placeholder_style="braces") + cmds = generate_sweep( + "echo {ALPHA}", {"ALPHA": [1]}, d, placeholder_style="braces" + ) self.assertEqual(cmds, ["echo 1"]) def test_env_vars_preserved(self): with tempfile.TemporaryDirectory() as d: - cmds = generate_sweep('run --out "$WORK/$SLURM_JOB_ID" --a A', {"A": [1]}, d) + cmds = generate_sweep( + 'run --out "$WORK/$SLURM_JOB_ID" --a A', {"A": [1]}, d + ) self.assertIn("$WORK", cmds[0]) self.assertIn("$SLURM_JOB_ID", cmds[0]) diff --git a/tests/systems/test_credentials.py b/tests/systems/test_credentials.py index 73b523f..cf46ef0 100644 --- a/tests/systems/test_credentials.py +++ b/tests/systems/test_credentials.py @@ -144,9 +144,7 @@ def test_raises_value_error_for_empty_system_id(self): def test_uses_explicit_username(self): self.t.systems.checkUserCredential.side_effect = UnauthorizedError() - establish_credentials( - self.t, "frontera", username="otheruser", verbose=False - ) + establish_credentials(self.t, "frontera", username="otheruser", verbose=False) self.t.systems.createUserCredential.assert_called_once_with( systemId="frontera", userName="otheruser", createTmsKeys=True ) @@ -159,7 +157,9 @@ def 
test_raises_credential_error_on_create_failure(self): with self.assertRaises(CredentialError): establish_credentials(self.t, "frontera", verbose=False) - def test_verbose_prints_skip_message(self, ): + def test_verbose_prints_skip_message( + self, + ): self.t.systems.checkUserCredential.return_value = Mock() # Should not raise; just prints a message establish_credentials(self.t, "frontera", verbose=True) From b87b240b27c9eb896ad65254678f9d873d730437 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 21:38:57 -0500 Subject: [PATCH 14/21] Using ruff instead of black --- .github/workflows/build-test.yml | 4 +- dapi/__init__.py | 17 ++-- dapi/apps.py | 4 +- dapi/client.py | 12 ++- dapi/db/__init__.py | 3 +- dapi/db/accessor.py | 10 +- dapi/db/config.py | 6 +- dapi/db/db.py | 12 ++- dapi/exceptions.py | 2 +- dapi/files.py | 17 ++-- dapi/jobs.py | 51 ++++++----- dapi/launcher.py | 2 +- examples/apps.ipynb | 21 +---- examples/db.ipynb | 10 +- examples/mpm/mpm-minimal.ipynb | 33 ++++++- examples/mpm/mpm.ipynb | 111 +++++++++++++++++++---- examples/openfoam/openfoam-minimal.ipynb | 42 +++++++-- examples/openfoam/openfoam.ipynb | 53 +++++++---- examples/opensees/OpenSeesMP-dapi.ipynb | 45 +++++++-- pyproject.toml | 15 ++- tests/files/test_encoding_consistency.py | 3 +- tests/jobs/test_dir_uri.py | 2 +- tests/jobs/test_runtime_summary.py | 1 - 23 files changed, 327 insertions(+), 149 deletions(-) diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 51ab33f..9d76c1c 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -18,7 +18,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: uv pip install --system ".[dev]" - - name: Lint with black - run: black --check . + - name: Lint and format check + run: ruff format --check . && ruff check . 
- name: Run tests run: pytest tests/ diff --git a/dapi/__init__.py b/dapi/__init__.py index 72f897f..74eef04 100644 --- a/dapi/__init__.py +++ b/dapi/__init__.py @@ -1,13 +1,13 @@ """Dapi - A Python wrapper for interacting with DesignSafe resources via the Tapis API. -This package provides a high-level, user-friendly interface for working with DesignSafe -resources through the Tapis V3 API. It simplifies complex operations and provides +This package provides a high-level, user-friendly interface for working with DesignSafe +resources through the Tapis V3 API. It simplifies complex operations and provides organized access to different service areas including authentication, file operations, job submission and monitoring, application discovery, system information, and database access. Key Features: - Simplified authentication with credential resolution hierarchy - - DesignSafe path translation (MyData, projects, etc.) to Tapis URIs + - DesignSafe path translation (MyData, projects, etc.) to Tapis URIs - High-level job submission with automatic app parameter mapping - Job monitoring with progress bars and status interpretation - File upload/download with automatic directory creation @@ -23,26 +23,26 @@ Example: Basic usage with automatic authentication: - + >>> from dapi import DSClient >>> client = DSClient() Enter DesignSafe Username: myuser Enter DesignSafe Password: [hidden] Authentication successful. - + >>> # File operations >>> client.files.upload("/local/file.txt", "/MyData/uploads/file.txt") >>> files = client.files.list("/MyData/uploads/") - + >>> # Job submission and monitoring >>> job_request = client.jobs.generate( ... app_id="matlab-r2023a", ... input_dir_uri="/MyData/analysis/input/", - ... script_filename="run_analysis.m" + ... script_filename="run_analysis.m", ... 
) >>> job = client.jobs.submit(job_request) >>> final_status = job.monitor() - + >>> # Database access >>> df = client.db.ngl.read_sql("SELECT * FROM earthquake_data LIMIT 10") @@ -52,6 +52,7 @@ SubmittedJob: Class for managing submitted Tapis jobs. Exception classes: Custom exceptions for specific error conditions. """ + from .client import DSClient # Import exceptions diff --git a/dapi/apps.py b/dapi/apps.py index c21efdc..560cf9c 100644 --- a/dapi/apps.py +++ b/dapi/apps.py @@ -1,6 +1,6 @@ from tapipy.tapis import Tapis from tapipy.errors import BaseTapyException -from typing import List, Any, Optional +from typing import List, Optional from .exceptions import AppDiscoveryError @@ -105,7 +105,7 @@ def get_app_details( app_info = t.apps.getAppLatestVersion(appId=app_id) if verbose: - print(f"\nApp Details:") + print("\nApp Details:") print(f" ID: {app_info.id}") print(f" Version: {app_info.version}") print(f" Owner: {app_info.owner}") diff --git a/dapi/client.py b/dapi/client.py index 64056f3..86e9325 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -9,7 +9,7 @@ from .db.accessor import DatabaseAccessor # Import only the necessary classes/functions from jobs -from .jobs import SubmittedJob, interpret_job_status +from .jobs import SubmittedJob from typing import List, Optional, Dict, Any @@ -196,7 +196,9 @@ def to_path(self, *args, **kwargs) -> str: str: The corresponding DesignSafe local path (e.g., /home/jupyter/MyData/path). Example: - >>> local_path = ds.files.to_path("tapis://designsafe.storage.default/user/data") + >>> local_path = ds.files.to_path( + ... "tapis://designsafe.storage.default/user/data" + ... ) >>> print(local_path) # "/home/jupyter/MyData/data" """ return files_module.tapis_uri_to_local_path(*args, **kwargs) @@ -562,7 +564,7 @@ def generate( ... input_dir_uri="tapis://designsafe.storage.default/username/input/", ... script_filename="run_analysis.m", ... max_minutes=120, - ... allocation="MyProject-123" + ... allocation="MyProject-123", ... 
) """ return jobs_module.generate_job_request( @@ -674,7 +676,9 @@ def interpret_status(self, final_status: str, job_uuid: Optional[str] = None): job_uuid (str, optional): The job UUID for context in the message. Example: - >>> ds.jobs.interpret_status("FINISHED", "12345678-1234-1234-1234-123456789abc") + >>> ds.jobs.interpret_status( + ... "FINISHED", "12345678-1234-1234-1234-123456789abc" + ... ) Job 12345678-1234-1234-1234-123456789abc completed successfully. """ jobs_module.interpret_job_status(final_status, job_uuid) diff --git a/dapi/db/__init__.py b/dapi/db/__init__.py index ed2ad57..e277c1d 100644 --- a/dapi/db/__init__.py +++ b/dapi/db/__init__.py @@ -13,5 +13,6 @@ >>> results = db.read_sql("SELECT * FROM table_name LIMIT 5") """ +from .db import DSDatabase as DSDatabase + name = "designsafe_db" -from .db import DSDatabase diff --git a/dapi/db/accessor.py b/dapi/db/accessor.py index d8d1707..a81bb4a 100644 --- a/dapi/db/accessor.py +++ b/dapi/db/accessor.py @@ -167,8 +167,8 @@ def close_all(self): Example: >>> accessor = DatabaseAccessor() >>> ngl_db = accessor.ngl # Creates connection - >>> vp_db = accessor.vp # Creates connection - >>> accessor.close_all() # Closes both connections + >>> vp_db = accessor.vp # Creates connection + >>> accessor.close_all() # Closes both connections Closing all active database engines/pools... Closing connection pool for database 'sjbrande_ngl_db'. Closing connection pool for database 'sjbrande_vpdb'. 
@@ -181,9 +181,9 @@ def close_all(self): try: # Call the close method on the DSDatabase instance db_instance.close() - self._connections[ - dbname - ] = None # Clear instance after closing engine + self._connections[dbname] = ( + None # Clear instance after closing engine + ) closed_count += 1 except Exception as e: print(f"Error closing engine for '{dbname}': {e}") diff --git a/dapi/db/config.py b/dapi/db/config.py index 14d063c..91c7525 100644 --- a/dapi/db/config.py +++ b/dapi/db/config.py @@ -10,15 +10,15 @@ Example: To access the NGL database configuration: - + >>> from .config import db_config >>> ngl_config = db_config["ngl"] >>> print(ngl_config["dbname"]) # "sjbrande_ngl_db" >>> print(ngl_config["env_prefix"]) # "NGL_" - + Environment variables would be: - NGL_DB_USER - - NGL_DB_PASSWORD + - NGL_DB_PASSWORD - NGL_DB_HOST - NGL_DB_PORT """ diff --git a/dapi/db/db.py b/dapi/db/db.py index 7f005e6..7ea0a66 100644 --- a/dapi/db/db.py +++ b/dapi/db/db.py @@ -14,7 +14,7 @@ >>> db = DSDatabase("ngl") Creating SQLAlchemy engine for database 'sjbrande_ngl_db' (ngl)... Engine for 'ngl' created. - + >>> df = db.read_sql("SELECT * FROM table_name LIMIT 5") >>> db.close() """ @@ -48,7 +48,7 @@ class DSDatabase: Example: >>> db = DSDatabase("ngl") >>> df = db.read_sql("SELECT COUNT(*) as total FROM users") - >>> print(df.iloc[0]['total']) + >>> print(df.iloc[0]["total"]) >>> db.close() """ @@ -69,7 +69,7 @@ def __init__(self, dbname="ngl"): Example: >>> db = DSDatabase("ngl") # Connect to NGL database - >>> db = DSDatabase("vp") # Connect to VP database + >>> db = DSDatabase("vp") # Connect to VP database """ if dbname not in db_config: raise ValueError( @@ -131,8 +131,10 @@ def read_sql(self, sql, output_type="DataFrame"): >>> print(df.columns.tolist()) # ['name', 'age'] >>> # Get dictionary result - >>> results = db.read_sql("SELECT COUNT(*) as total FROM users", output_type="dict") - >>> print(results[0]['total']) # 150 + >>> results = db.read_sql( + ... 
"SELECT COUNT(*) as total FROM users", output_type="dict" + ... ) + >>> print(results[0]["total"]) # 150 """ if not sql: raise ValueError("SQL query string is required") diff --git a/dapi/exceptions.py b/dapi/exceptions.py index 2ef9609..2293918 100644 --- a/dapi/exceptions.py +++ b/dapi/exceptions.py @@ -9,7 +9,7 @@ ... client.auth.authenticate() ... except AuthenticationError as e: ... print(f"Authentication failed: {e}") - + >>> try: ... client.files.upload("/local/file.txt", "tapis://system/path/file.txt") ... except FileOperationError as e: diff --git a/dapi/files.py b/dapi/files.py index 35a0bd5..0ce4bde 100644 --- a/dapi/files.py +++ b/dapi/files.py @@ -6,7 +6,6 @@ # import jwt from tapipy.tapis import Tapis from tapipy.errors import BaseTapyException -import json from .exceptions import FileOperationError, AuthenticationError from typing import List @@ -57,7 +56,7 @@ def _parse_tapis_uri(tapis_uri: str) -> (str, str): Example: >>> system_id, path = _parse_tapis_uri("tapis://mysystem/folder/file.txt") >>> print(system_id) # "mysystem" - >>> print(path) # "folder/file.txt" + >>> print(path) # "folder/file.txt" """ if not tapis_uri.startswith("tapis://"): raise ValueError( @@ -95,10 +94,14 @@ def tapis_uri_to_local_path(tapis_uri: str) -> str: ValueError: If the Tapis URI format is invalid. Example: - >>> local_path = tapis_uri_to_local_path("tapis://designsafe.storage.default/user/data/file.txt") + >>> local_path = tapis_uri_to_local_path( + ... "tapis://designsafe.storage.default/user/data/file.txt" + ... ) >>> print(local_path) # "/home/jupyter/MyData/data/file.txt" - >>> local_path = tapis_uri_to_local_path("tapis://designsafe.storage.community/datasets/earthquake.csv") + >>> local_path = tapis_uri_to_local_path( + ... "tapis://designsafe.storage.community/datasets/earthquake.csv" + ... 
) >>> print(local_path) # "/home/jupyter/CommunityData/datasets/earthquake.csv" """ if not tapis_uri.startswith("tapis://"): @@ -352,7 +355,7 @@ def get_ds_path_uri(t: Tapis, path: str, verify_exists: bool = False) -> str: # involve checking the result count or specific item name, but this # basic check catches non-existent parent directories. t.files.listFiles(systemId=system_id, path=encoded_remote_path, limit=1) - print(f"Verification successful: Path exists.") + print("Verification successful: Path exists.") except BaseTapyException as e: # Specifically check for 404 on the listFiles call if hasattr(e, "response") and e.response and e.response.status_code == 404: @@ -437,7 +440,9 @@ def download_file(t: Tapis, remote_uri: str, local_path: str): FileOperationError: If the download operation fails or remote file not found. Example: - >>> download_file(client, "tapis://mysystem/data/results.txt", "/local/results.txt") + >>> download_file( + ... client, "tapis://mysystem/data/results.txt", "/local/results.txt" + ... ) Downloading from system 'mysystem' path 'data/results.txt' to '/local/results.txt'... Download complete. 
""" diff --git a/dapi/jobs.py b/dapi/jobs.py index a5e7ed0..fc69406 100644 --- a/dapi/jobs.py +++ b/dapi/jobs.py @@ -2,13 +2,10 @@ import time import json import os -import urllib.parse -import logging # Import logging for the timeout warning -from datetime import datetime, timedelta, timezone +from datetime import datetime from typing import Dict, Any, Optional, List from tapipy.tapis import Tapis from tapipy.errors import BaseTapyException -from dataclasses import dataclass, field, asdict from tqdm.auto import tqdm import pandas as pd from .apps import get_app_details @@ -163,21 +160,31 @@ def generate_job_request( "archiveSystemId": archive_system_id, **({"archiveSystemDir": archive_system_dir} if archive_system_dir else {}), "archiveOnAppError": getattr(job_attrs, "archiveOnAppError", True), - "execSystemLogicalQueue": queue - if queue is not None - else getattr(job_attrs, "execSystemLogicalQueue", None), - "nodeCount": node_count - if node_count is not None - else getattr(job_attrs, "nodeCount", None), - "coresPerNode": cores_per_node - if cores_per_node is not None - else getattr(job_attrs, "coresPerNode", None), - "maxMinutes": max_minutes - if max_minutes is not None - else getattr(job_attrs, "maxMinutes", None), - "memoryMB": memory_mb - if memory_mb is not None - else getattr(job_attrs, "memoryMB", None), + "execSystemLogicalQueue": ( + queue + if queue is not None + else getattr(job_attrs, "execSystemLogicalQueue", None) + ), + "nodeCount": ( + node_count + if node_count is not None + else getattr(job_attrs, "nodeCount", None) + ), + "coresPerNode": ( + cores_per_node + if cores_per_node is not None + else getattr(job_attrs, "coresPerNode", None) + ), + "maxMinutes": ( + max_minutes + if max_minutes is not None + else getattr(job_attrs, "maxMinutes", None) + ), + "memoryMB": ( + memory_mb + if memory_mb is not None + else getattr(job_attrs, "memoryMB", None) + ), **( {"isMpi": getattr(job_attrs, "isMpi", None)} if getattr(job_attrs, "isMpi", None) is not 
None @@ -893,7 +900,7 @@ def monitor(self, interval: int = 15, timeout_minutes: Optional[int] = None) -> return current_status # Should be a terminal state if loops finished except KeyboardInterrupt: - print(f"\nMonitoring interrupted by user.") + print("\nMonitoring interrupted by user.") return STATUS_INTERRUPTED except JobMonitorError as e: print(f"\nError during monitoring: {e}") @@ -906,12 +913,12 @@ def monitor(self, interval: int = 15, timeout_minutes: Optional[int] = None) -> if pbar_waiting is not None: try: pbar_waiting.close() - except: + except Exception: pass if pbar_monitoring is not None: try: pbar_monitoring.close() - except: + except Exception: pass def print_runtime_summary(self, verbose: bool = False): diff --git a/dapi/launcher.py b/dapi/launcher.py index caf9a10..beede12 100644 --- a/dapi/launcher.py +++ b/dapi/launcher.py @@ -128,7 +128,7 @@ def generate_sweep( f'pylauncher.ClassicLauncher("runsList.txt", debug="{debug}")\n' ) else: - script = "import pylauncher\n" 'pylauncher.ClassicLauncher("runsList.txt")\n' + script = 'import pylauncher\npylauncher.ClassicLauncher("runsList.txt")\n' (dirpath / "call_pylauncher.py").write_text(script, encoding="utf-8") return commands diff --git a/examples/apps.ipynb b/examples/apps.ipynb index ecf6ff3..72b4079 100644 --- a/examples/apps.ipynb +++ b/examples/apps.ipynb @@ -35,7 +35,8 @@ "metadata": {}, "outputs": [], "source": [ - "import sys, os\n", + "import sys\n", + "import os\n", "\n", "print(\n", " \"Old NumPy 1.24 installed!\"\n", @@ -76,23 +77,7 @@ "# Import only DSClient and exceptions needed at top level\n", "from dapi import (\n", " DSClient,\n", - " SubmittedJob,\n", - " interpret_job_status, # Import new function\n", - " AppDiscoveryError,\n", - " FileOperationError,\n", - " JobSubmissionError,\n", - " SystemInfoError,\n", - " JobMonitorError,\n", - " # Optionally import status constants if you want to check against them explicitly\n", - " STATUS_TIMEOUT,\n", - " STATUS_UNKNOWN,\n", - " 
TAPIS_TERMINAL_STATES,\n", - ")\n", - "import json\n", - "from datetime import datetime\n", - "from dataclasses import asdict\n", - "import pandas as pd\n", - "import tqdm as notebook_tqdm" + ")" ] }, { diff --git a/examples/db.ipynb b/examples/db.ipynb index 102466e..b7e8053 100644 --- a/examples/db.ipynb +++ b/examples/db.ipynb @@ -27,7 +27,8 @@ "metadata": {}, "outputs": [], "source": [ - "import sys, os\n", + "import sys\n", + "import os\n", "\n", "print(\n", " \"Old NumPy 1.24 installed!\"\n", @@ -58,12 +59,7 @@ "import os\n", "from dapi import DSClient # Import only the main client\n", "\n", - "# Import exceptions if needed for specific handling later\n", - "from dapi import FileOperationError, JobSubmissionError, JobMonitorError\n", - "import json\n", - "from datetime import datetime\n", - "from dataclasses import asdict\n", - "import pandas as pd" + "# Import exceptions if needed for specific handling later" ] }, { diff --git a/examples/mpm/mpm-minimal.ipynb b/examples/mpm/mpm-minimal.ipynb index 46f4c15..149c715 100644 --- a/examples/mpm/mpm-minimal.ipynb +++ b/examples/mpm/mpm-minimal.ipynb @@ -47,7 +47,8 @@ "metadata": {}, "outputs": [], "source": [ - "import sys, os\n", + "import sys\n", + "import os\n", "\n", "print(\n", " \"Old NumPy 1.24 installed!\"\n", @@ -137,7 +138,11 @@ "id": "3f0ee687", "metadata": {}, "outputs": [], - "source": "# Convert DesignSafe path to Tapis URI format\ninput_uri = ds.files.to_uri(ds_path)\nprint(f\"Input Directory Tapis URI: {input_uri}\")" + "source": [ + "# Convert DesignSafe path to Tapis URI format\n", + "input_uri = ds.files.to_uri(ds_path)\n", + "print(f\"Input Directory Tapis URI: {input_uri}\")" + ] }, { "cell_type": "code", @@ -145,7 +150,17 @@ "id": "6257d31a", "metadata": {}, "outputs": [], - "source": "# Generate job request dictionary using app defaults\njob_dict = ds.jobs.generate(\n app_id=app_id_to_use,\n input_dir_uri=input_uri,\n script_filename=input_filename,\n max_minutes=max_job_minutes,\n 
allocation=tacc_allocation,\n)\nprint(json.dumps(job_dict, indent=2, default=str))" + "source": [ + "# Generate job request dictionary using app defaults\n", + "job_dict = ds.jobs.generate(\n", + " app_id=app_id_to_use,\n", + " input_dir_uri=input_uri,\n", + " script_filename=input_filename,\n", + " max_minutes=max_job_minutes,\n", + " allocation=tacc_allocation,\n", + ")\n", + "print(json.dumps(job_dict, indent=2, default=str))" + ] }, { "cell_type": "code", @@ -212,7 +227,11 @@ "id": "8e04a5ef", "metadata": {}, "outputs": [], - "source": "# Submit the job to TACC\nsubmitted_job = ds.jobs.submit(job_dict)\nprint(f\"Job UUID: {submitted_job.uuid}\")" + "source": [ + "# Submit the job to TACC\n", + "submitted_job = ds.jobs.submit(job_dict)\n", + "print(f\"Job UUID: {submitted_job.uuid}\")" + ] }, { "cell_type": "code", @@ -335,7 +354,11 @@ "id": "6437373b", "metadata": {}, "outputs": [], - "source": "# Get current job status\ncurrent_status = ds.jobs.status(submitted_job.uuid)\nprint(f\"Current status: {current_status}\")" + "source": [ + "# Get current job status\n", + "current_status = ds.jobs.status(submitted_job.uuid)\n", + "print(f\"Current status: {current_status}\")" + ] }, { "cell_type": "code", diff --git a/examples/mpm/mpm.ipynb b/examples/mpm/mpm.ipynb index 3808838..29ab102 100644 --- a/examples/mpm/mpm.ipynb +++ b/examples/mpm/mpm.ipynb @@ -52,7 +52,8 @@ } ], "source": [ - "import sys, os\n", + "import sys\n", + "import os\n", "\n", "print(\n", " \"Old NumPy 1.24 installed!\"\n", @@ -117,23 +118,13 @@ "# Import only DSClient and exceptions needed at top level\n", "from dapi import (\n", " DSClient,\n", - " SubmittedJob,\n", - " interpret_job_status, # Import new function\n", " AppDiscoveryError,\n", " FileOperationError,\n", " JobSubmissionError,\n", " SystemInfoError,\n", " JobMonitorError,\n", - " # Optionally import status constants if you want to check against them explicitly\n", - " STATUS_TIMEOUT,\n", - " STATUS_UNKNOWN,\n", - " 
TAPIS_TERMINAL_STATES,\n", ")\n", - "import json\n", - "from datetime import datetime\n", - "from dataclasses import asdict\n", - "import pandas as pd\n", - "import tqdm as notebook_tqdm" + "import json" ] }, { @@ -193,7 +184,14 @@ "id": "3f0ee687", "metadata": {}, "outputs": [], - "source": "try:\n input_uri = ds.files.to_uri(ds_path)\n print(f\"Input Directory Tapis URI: {input_uri}\")\nexcept Exception as e:\n print(f\"Error translating path '{ds_path}': {e}\")\n raise SystemExit(\"Stopping notebook due to path translation error.\")" + "source": [ + "try:\n", + " input_uri = ds.files.to_uri(ds_path)\n", + " print(f\"Input Directory Tapis URI: {input_uri}\")\n", + "except Exception as e:\n", + " print(f\"Error translating path '{ds_path}': {e}\")\n", + " raise SystemExit(\"Stopping notebook due to path translation error.\")" + ] }, { "cell_type": "code", @@ -201,7 +199,27 @@ "id": "6257d31a", "metadata": {}, "outputs": [], - "source": "try:\n print(\"\\nGenerating job request dictionary...\")\n job_dict = ds.jobs.generate(\n app_id=app_id_to_use,\n input_dir_uri=input_uri,\n script_filename=input_filename,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n # queue=queue, # Uncomment if you want to specify a queue\n )\n print(\"\\n--- Generated Job Request Dictionary ---\")\n print(json.dumps(job_dict, indent=2, default=str))\n print(\"---------------------------------------\")\nexcept (AppDiscoveryError, ValueError, JobSubmissionError) as e:\n print(f\"Error generating job request: {e}\")\n raise SystemExit(\"Stopping notebook due to job request generation error.\")\nexcept Exception as e:\n print(f\"An unexpected error occurred during job request generation: {e}\")\n raise SystemExit(\"Stopping notebook due to unexpected generation error.\")" + "source": [ + "try:\n", + " print(\"\\nGenerating job request dictionary...\")\n", + " job_dict = ds.jobs.generate(\n", + " app_id=app_id_to_use,\n", + " input_dir_uri=input_uri,\n", + " 
script_filename=input_filename,\n", + " max_minutes=max_job_minutes,\n", + " allocation=tacc_allocation,\n", + " # queue=queue, # Uncomment if you want to specify a queue\n", + " )\n", + " print(\"\\n--- Generated Job Request Dictionary ---\")\n", + " print(json.dumps(job_dict, indent=2, default=str))\n", + " print(\"---------------------------------------\")\n", + "except (AppDiscoveryError, ValueError, JobSubmissionError) as e:\n", + " print(f\"Error generating job request: {e}\")\n", + " raise SystemExit(\"Stopping notebook due to job request generation error.\")\n", + "except Exception as e:\n", + " print(f\"An unexpected error occurred during job request generation: {e}\")\n", + " raise SystemExit(\"Stopping notebook due to unexpected generation error.\")" + ] }, { "cell_type": "code", @@ -272,7 +290,25 @@ "id": "8e04a5ef", "metadata": {}, "outputs": [], - "source": "if \"job_dict\" not in locals():\n print(\"Error: job_dict not found.\")\n raise SystemExit(\"Stopping notebook.\")\ntry:\n print(\"\\nSubmitting the job request dictionary...\")\n submitted_job = ds.jobs.submit(job_dict)\n print(f\"Job Submitted Successfully!\")\n print(f\"Job UUID: {submitted_job.uuid}\")\nexcept JobSubmissionError as e:\n print(f\"Job submission failed: {e}\")\n print(\"\\n--- Failed Job Request ---\")\n print(json.dumps(job_dict, indent=2, default=str))\n print(\"--------------------------\")\n raise SystemExit(\"Stopping notebook due to job submission error.\")\nexcept Exception as e:\n print(f\"An unexpected error occurred during job submission: {e}\")\n raise SystemExit(\"Stopping notebook due to unexpected submission error.\")" + "source": [ + "if \"job_dict\" not in locals():\n", + " print(\"Error: job_dict not found.\")\n", + " raise SystemExit(\"Stopping notebook.\")\n", + "try:\n", + " print(\"\\nSubmitting the job request dictionary...\")\n", + " submitted_job = ds.jobs.submit(job_dict)\n", + " print(\"Job Submitted Successfully!\")\n", + " print(f\"Job UUID: 
{submitted_job.uuid}\")\n", + "except JobSubmissionError as e:\n", + " print(f\"Job submission failed: {e}\")\n", + " print(\"\\n--- Failed Job Request ---\")\n", + " print(json.dumps(job_dict, indent=2, default=str))\n", + " print(\"--------------------------\")\n", + " raise SystemExit(\"Stopping notebook due to job submission error.\")\n", + "except Exception as e:\n", + " print(f\"An unexpected error occurred during job submission: {e}\")\n", + " raise SystemExit(\"Stopping notebook due to unexpected submission error.\")" + ] }, { "cell_type": "code", @@ -399,7 +435,7 @@ "source": [ "# Check against known good terminal states or the specific success state\n", "if final_status in [\"FINISHED\", \"FAILED\"]: # Or just: if final_status == \"FINISHED\":\n", - " print(f\"\\nAttempting to display runtime summary...\")\n", + " print(\"\\nAttempting to display runtime summary...\")\n", " try:\n", " submitted_job.print_runtime_summary(verbose=False)\n", " except Exception as e:\n", @@ -414,7 +450,24 @@ "id": "6437373b", "metadata": {}, "outputs": [], - "source": "if \"ds\" in locals() and \"submitted_job\" in locals(): # Check if ds and a job exist\n job_uuid_to_check = submitted_job.uuid # Or any other job UUID string\n try:\n print(\n f\"\\nFetching status for job {job_uuid_to_check} using ds.jobs.status()...\"\n )\n current_status = ds.jobs.status(job_uuid_to_check)\n print(f\"Status of job {job_uuid_to_check}: {current_status}\")\n except JobMonitorError as e:\n print(f\"Error getting job status: {e}\")\n except Exception as e:\n print(f\"An unexpected error occurred: {e}\")\nelse:\n print(\n \"DSClient ('ds') or submitted_job not initialized. 
Cannot demonstrate ds.jobs.status().\"\n )" + "source": [ + "if \"ds\" in locals() and \"submitted_job\" in locals(): # Check if ds and a job exist\n", + " job_uuid_to_check = submitted_job.uuid # Or any other job UUID string\n", + " try:\n", + " print(\n", + " f\"\\nFetching status for job {job_uuid_to_check} using ds.jobs.status()...\"\n", + " )\n", + " current_status = ds.jobs.status(job_uuid_to_check)\n", + " print(f\"Status of job {job_uuid_to_check}: {current_status}\")\n", + " except JobMonitorError as e:\n", + " print(f\"Error getting job status: {e}\")\n", + " except Exception as e:\n", + " print(f\"An unexpected error occurred: {e}\")\n", + "else:\n", + " print(\n", + " \"DSClient ('ds') or submitted_job not initialized. Cannot demonstrate ds.jobs.status().\"\n", + " )" + ] }, { "cell_type": "code", @@ -601,7 +654,7 @@ ], "source": [ "# if final_status in TAPIS_TERMINAL_STATES and final_status != STATUS_UNKNOWN: # Check if it's a known end state\n", - "print(f\"\\nAttempting to access archive information...\")\n", + "print(\"\\nAttempting to access archive information...\")\n", "try:\n", " archive_uri = submitted_job.archive_uri\n", " if archive_uri:\n", @@ -809,7 +862,25 @@ "id": "9aaef98a", "metadata": {}, "outputs": [], - "source": "# --- Example: List Queues for Frontera ---\ntry:\n print(\"\\n--- System Queue Information ---\")\n frontera_queues = ds.systems.queues(\"frontera\")\n # You can now inspect the 'frontera_queues' list\n # Example: Find if 'development' queue exists\n dev_queue_exists = any(q.name == \"development\" for q in frontera_queues)\n print(f\"Does 'development' queue exist on Frontera? 
{dev_queue_exists}\")\n\n # Example: List queues for a non-existent system\n ds.systems.queues(\"non-existent-system\") # This would raise SystemInfoError\n\nexcept SystemInfoError as e:\n print(f\"Error getting system info: {e}\")\nexcept Exception as e:\n print(f\"An unexpected error occurred: {e}\")\nprint(\"-----------------------------\")" + "source": [ + "# --- Example: List Queues for Frontera ---\n", + "try:\n", + " print(\"\\n--- System Queue Information ---\")\n", + " frontera_queues = ds.systems.queues(\"frontera\")\n", + " # You can now inspect the 'frontera_queues' list\n", + " # Example: Find if 'development' queue exists\n", + " dev_queue_exists = any(q.name == \"development\" for q in frontera_queues)\n", + " print(f\"Does 'development' queue exist on Frontera? {dev_queue_exists}\")\n", + "\n", + " # Example: List queues for a non-existent system\n", + " ds.systems.queues(\"non-existent-system\") # This would raise SystemInfoError\n", + "\n", + "except SystemInfoError as e:\n", + " print(f\"Error getting system info: {e}\")\n", + "except Exception as e:\n", + " print(f\"An unexpected error occurred: {e}\")\n", + "print(\"-----------------------------\")" + ] }, { "cell_type": "markdown", @@ -839,7 +910,7 @@ " # Example: Try translating a non-existent path with verification (will raise error)\n", " print(f\"\\nTranslating and verifying non-existent path: {ds_path_nonexistent}\")\n", " input_uri_bad = ds.files.to_uri(ds_path_nonexistent, verify_exists=True)\n", - " print(f\"This line should not be reached.\")\n", + " print(\"This line should not be reached.\")\n", "\n", "except FileOperationError as e:\n", " print(f\"Error during path translation/verification: {e}\")\n", diff --git a/examples/openfoam/openfoam-minimal.ipynb b/examples/openfoam/openfoam-minimal.ipynb index 8f21436..aeaf74d 100644 --- a/examples/openfoam/openfoam-minimal.ipynb +++ b/examples/openfoam/openfoam-minimal.ipynb @@ -41,7 +41,8 @@ } ], "source": [ - "import sys, os\n", + 
"import sys\n", + "import os\n", "\n", "print(\n", " \"Old NumPy 1.24 installed!\"\n", @@ -115,14 +116,30 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "# Convert DesignSafe path to Tapis URI format\ninput_uri = ds.files.to_uri(ds_path)\nprint(f\"Input Directory Tapis URI: {input_uri}\")" + "source": [ + "# Convert DesignSafe path to Tapis URI format\n", + "input_uri = ds.files.to_uri(ds_path)\n", + "print(f\"Input Directory Tapis URI: {input_uri}\")" + ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], - "source": "# Generate job request dictionary using app defaults\njob_dict = ds.jobs.generate(\n app_id=app_id_to_use,\n input_dir_uri=input_uri,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n archive_system=\"designsafe\",\n extra_env_vars=openfoam_env_vars,\n input_dir_param_name=\"Case Directory\", # OpenFOAM apps use \"Case Directory\" instead of \"Input Directory\"\n)\nprint(json.dumps(job_dict, indent=2, default=str))" + "source": [ + "# Generate job request dictionary using app defaults\n", + "job_dict = ds.jobs.generate(\n", + " app_id=app_id_to_use,\n", + " input_dir_uri=input_uri,\n", + " max_minutes=max_job_minutes,\n", + " allocation=tacc_allocation,\n", + " archive_system=\"designsafe\",\n", + " extra_env_vars=openfoam_env_vars,\n", + " input_dir_param_name=\"Case Directory\", # OpenFOAM apps use \"Case Directory\" instead of \"Input Directory\"\n", + ")\n", + "print(json.dumps(job_dict, indent=2, default=str))" + ] }, { "cell_type": "code", @@ -195,7 +212,11 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "# Submit the job to TACC\nsubmitted_job = ds.jobs.submit(job_dict)\nprint(f\"Job UUID: {submitted_job.uuid}\")" + "source": [ + "# Submit the job to TACC\n", + "submitted_job = ds.jobs.submit(job_dict)\n", + "print(f\"Job UUID: {submitted_job.uuid}\")" + ] }, { "cell_type": "code", @@ -284,7 +305,11 @@ "execution_count": null, "metadata": {}, "outputs": 
[], - "source": "# Get current job status\ncurrent_status = ds.jobs.status(submitted_job.uuid)\nprint(f\"Current status: {current_status}\")" + "source": [ + "# Get current job status\n", + "current_status = ds.jobs.status(submitted_job.uuid)\n", + "print(f\"Current status: {current_status}\")" + ] }, { "cell_type": "code", @@ -422,7 +447,11 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "# Get DesignSafe Jupyter path\narchive_path = ds.files.to_path(archive_uri)\nprint(archive_path)" + "source": [ + "# Get DesignSafe Jupyter path\n", + "archive_path = ds.files.to_path(archive_uri)\n", + "print(archive_path)" + ] }, { "cell_type": "code", @@ -430,7 +459,6 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import os" ] diff --git a/examples/openfoam/openfoam.ipynb b/examples/openfoam/openfoam.ipynb index f6ef83c..c56ebcd 100644 --- a/examples/openfoam/openfoam.ipynb +++ b/examples/openfoam/openfoam.ipynb @@ -32,7 +32,8 @@ "metadata": {}, "outputs": [], "source": [ - "import sys, os\n", + "import sys\n", + "import os\n", "\n", "print(\n", " \"Old NumPy 1.24 installed!\"\n", @@ -74,21 +75,11 @@ "source": [ "import os\n", "import json\n", - "from datetime import datetime\n", "\n", "# Import DAPI components\n", "from dapi import (\n", " DSClient,\n", - " SubmittedJob,\n", - " interpret_job_status,\n", - " AppDiscoveryError,\n", " FileOperationError,\n", - " JobSubmissionError,\n", - " SystemInfoError,\n", - " JobMonitorError,\n", - " STATUS_TIMEOUT,\n", - " STATUS_UNKNOWN,\n", - " TAPIS_TERMINAL_STATES,\n", ")\n", "\n", "print(\"DAPI imports successful.\")" @@ -625,7 +616,10 @@ "scrolled": true }, "outputs": [], - "source": "# Translate local path to Tapis URI\ninput_uri = ds.files.to_uri(ds_path, verify_exists=True)" + "source": [ + "# Translate local path to Tapis URI\n", + "input_uri = ds.files.to_uri(ds_path, verify_exists=True)" + ] }, { "cell_type": "markdown", @@ -639,7 +633,26 @@ 
"execution_count": null, "metadata": {}, "outputs": [], - "source": "# Generate job request using DAPI\n\njob_dict = ds.jobs.generate(\n app_id=app_id,\n input_dir_uri=input_uri,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n archive_system=archive_system,\n job_name=job_name,\n node_count=node_count, # Optional resource override\n cores_per_node=cores_per_node, # Optional resource override\n extra_env_vars=openfoam_env_vars,\n)\n\n\nprint(\"\\n--- Generated Job Request Dictionary ---\")\nprint(json.dumps(job_dict, indent=2, default=str))\nprint(\"---------------------------------------\")" + "source": [ + "# Generate job request using DAPI\n", + "\n", + "job_dict = ds.jobs.generate(\n", + " app_id=app_id,\n", + " input_dir_uri=input_uri,\n", + " max_minutes=max_job_minutes,\n", + " allocation=tacc_allocation,\n", + " archive_system=archive_system,\n", + " job_name=job_name,\n", + " node_count=node_count, # Optional resource override\n", + " cores_per_node=cores_per_node, # Optional resource override\n", + " extra_env_vars=openfoam_env_vars,\n", + ")\n", + "\n", + "\n", + "print(\"\\n--- Generated Job Request Dictionary ---\")\n", + "print(json.dumps(job_dict, indent=2, default=str))\n", + "print(\"---------------------------------------\")" + ] }, { "cell_type": "markdown", @@ -653,7 +666,10 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "# Submit the job\nsubmitted_job = ds.jobs.submit(job_dict)" + "source": [ + "# Submit the job\n", + "submitted_job = ds.jobs.submit(job_dict)" + ] }, { "cell_type": "markdown", @@ -843,7 +859,7 @@ "source": [ "# Access job archive\n", "try:\n", - " print(f\"\\nAttempting to access archive information...\")\n", + " print(\"\\nAttempting to access archive information...\")\n", " archive_uri = submitted_job.archive_uri\n", " if archive_uri:\n", " print(f\"Job Archive Tapis URI: {archive_uri}\")\n", @@ -894,7 +910,11 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "# Get 
DesignSafe Jupyter path\narchive_path = ds.files.to_path(archive_uri)\nprint(archive_path)" + "source": [ + "# Get DesignSafe Jupyter path\n", + "archive_path = ds.files.to_path(archive_uri)\n", + "print(archive_path)" + ] }, { "cell_type": "code", @@ -902,7 +922,6 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import os" ] diff --git a/examples/opensees/OpenSeesMP-dapi.ipynb b/examples/opensees/OpenSeesMP-dapi.ipynb index ce3a083..98e2085 100644 --- a/examples/opensees/OpenSeesMP-dapi.ipynb +++ b/examples/opensees/OpenSeesMP-dapi.ipynb @@ -64,7 +64,8 @@ } ], "source": [ - "import sys, os\n", + "import sys\n", + "import os\n", "\n", "print(\n", " \"Old NumPy 1.24 installed!\"\n", @@ -91,9 +92,7 @@ "metadata": {}, "outputs": [], "source": [ - "%matplotlib inline\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt" + "%matplotlib inline" ] }, { @@ -129,8 +128,7 @@ "# Import DAPI and other required libraries\n", "from dapi import DSClient\n", "import os\n", - "import json\n", - "from datetime import date" + "import json" ] }, { @@ -162,7 +160,12 @@ "id": "cell-8", "metadata": {}, "outputs": [], - "source": "ds_path = os.getcwd() + \"/DS_input\"\nprint(f\"DesignSafe path: {ds_path}\")\ninput_uri = ds.files.to_uri(ds_path)\nprint(f\"Input URI: {input_uri}\")" + "source": [ + "ds_path = os.getcwd() + \"/DS_input\"\n", + "print(f\"DesignSafe path: {ds_path}\")\n", + "input_uri = ds.files.to_uri(ds_path)\n", + "print(f\"Input URI: {input_uri}\")" + ] }, { "cell_type": "code", @@ -190,7 +193,18 @@ "id": "5286f5ac", "metadata": {}, "outputs": [], - "source": "# Generate job request dictionary using app defaults\njob_dict = ds.jobs.generate(\n app_id=app_id,\n input_dir_uri=input_uri,\n script_filename=input_filename,\n archive_system=archive_system,\n max_minutes=max_job_minutes,\n allocation=tacc_allocation,\n queue=queue,\n)" + "source": [ + "# Generate job request dictionary using app defaults\n", 
+ "job_dict = ds.jobs.generate(\n", + " app_id=app_id,\n", + " input_dir_uri=input_uri,\n", + " script_filename=input_filename,\n", + " archive_system=archive_system,\n", + " max_minutes=max_job_minutes,\n", + " allocation=tacc_allocation,\n", + " queue=queue,\n", + ")" + ] }, { "cell_type": "code", @@ -269,7 +283,14 @@ "id": "cell-12", "metadata": {}, "outputs": [], - "source": "# Submit job using dapi\nsubmitted_job = ds.jobs.submit(job_dict)\nprint(f\"Job launched with UUID: {submitted_job.uuid}\")\nprint(\n \"Can also check in DesignSafe portal under - Workspace > Tools & Application > Job Status\"\n)" + "source": [ + "# Submit job using dapi\n", + "submitted_job = ds.jobs.submit(job_dict)\n", + "print(f\"Job launched with UUID: {submitted_job.uuid}\")\n", + "print(\n", + " \"Can also check in DesignSafe portal under - Workspace > Tools & Application > Job Status\"\n", + ")" + ] }, { "cell_type": "code", @@ -495,7 +516,11 @@ "id": "96757814-3a1f-4981-9a3b-f27fc86a553c", "metadata": {}, "outputs": [], - "source": "# Get DesignSafe Jupyter path\narchive_path = ds.files.to_path(input_dir_archive_uri)\nprint(archive_path)" + "source": [ + "# Get DesignSafe Jupyter path\n", + "archive_path = ds.files.to_path(input_dir_archive_uri)\n", + "print(archive_path)" + ] }, { "cell_type": "code", diff --git a/pyproject.toml b/pyproject.toml index ad4dcdf..c052b51 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ [project.optional-dependencies] dev = [ "pytest>=7.4.2", - "black[jupyter]>=23.11.0", + "ruff>=0.8.0", "ipykernel>=6.26.0", ] docs = [ @@ -41,5 +41,18 @@ docs = [ requires = ["hatchling"] build-backend = "hatchling.build" +[tool.ruff] +target-version = "py39" +line-length = 88 + +[tool.ruff.lint] +ignore = ["E741"] + +[tool.ruff.lint.per-file-ignores] +"examples/**" = ["E402"] + +[tool.ruff.format] +docstring-code-format = true + [tool.hatch.build.targets.wheel] packages = ["dapi"] diff --git 
a/tests/files/test_encoding_consistency.py b/tests/files/test_encoding_consistency.py index f7fcde5..bd166e5 100644 --- a/tests/files/test_encoding_consistency.py +++ b/tests/files/test_encoding_consistency.py @@ -1,8 +1,7 @@ import unittest -from unittest.mock import MagicMock, Mock +from unittest.mock import MagicMock from dapi.files import _safe_quote, _parse_tapis_uri, get_ds_path_uri from tapipy.tapis import Tapis -import urllib.parse class TestEncodingConsistency(unittest.TestCase): diff --git a/tests/jobs/test_dir_uri.py b/tests/jobs/test_dir_uri.py index 611fc90..4e914ac 100644 --- a/tests/jobs/test_dir_uri.py +++ b/tests/jobs/test_dir_uri.py @@ -1,5 +1,5 @@ import unittest -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock from dapi.files import get_ds_path_uri from tapipy.tapis import Tapis diff --git a/tests/jobs/test_runtime_summary.py b/tests/jobs/test_runtime_summary.py index e8d9255..c35048a 100644 --- a/tests/jobs/test_runtime_summary.py +++ b/tests/jobs/test_runtime_summary.py @@ -3,7 +3,6 @@ from io import StringIO import sys from datetime import datetime, timedelta -import re import dapi as ds From e411cc3a9bd3e314a38cc8a2c29817ef35941f06 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Wed, 18 Mar 2026 22:10:42 -0500 Subject: [PATCH 15/21] Fix tests --- tests/auth/test_auth.py | 2 + tests/jobs/test_archive_config.py | 24 +++++-- tests/jobs/test_dir_uri.py | 16 +++-- tests/jobs/test_job_gen_jobinfo.py | 112 +++++++++++++++++++++++++---- tests/jobs/test_job_status.py | 60 +++++++--------- tests/jobs/test_runtime_summary.py | 16 ++++- 6 files changed, 168 insertions(+), 62 deletions(-) diff --git a/tests/auth/test_auth.py b/tests/auth/test_auth.py index 08ce93d..13dc432 100644 --- a/tests/auth/test_auth.py +++ b/tests/auth/test_auth.py @@ -23,6 +23,7 @@ def test_init_with_env_variables(self, mock_environ, mock_tapis): base_url="https://designsafe.tapis.io", username="test_user", password="test_password", + 
download_latest_specs=False, ) mock_tapis_obj.get_tokens.assert_called_once() self.assertEqual(result, mock_tapis_obj) @@ -49,6 +50,7 @@ def test_init_with_user_input( base_url="https://designsafe.tapis.io", username="test_user", password="test_password", + download_latest_specs=False, ) mock_tapis_obj.get_tokens.assert_called_once() self.assertEqual(result, mock_tapis_obj) diff --git a/tests/jobs/test_archive_config.py b/tests/jobs/test_archive_config.py index 08e3762..c4f8c78 100644 --- a/tests/jobs/test_archive_config.py +++ b/tests/jobs/test_archive_config.py @@ -1,8 +1,17 @@ import unittest -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, Mock, patch from dapi.jobs import generate_job_request +def _make_app_arg(name, arg="", input_mode="INCLUDE_ON_DEMAND"): + """Create a mock app arg with a real .name attribute.""" + m = Mock() + m.name = name + m.arg = arg + m.inputMode = input_mode + return m + + class TestArchiveConfiguration(unittest.TestCase): """Test cases for archive system configuration in job generation""" @@ -30,13 +39,20 @@ def setUp(self): self.mock_job_attrs.memoryMB = 1000 self.mock_job_attrs.isMpi = False - # Mock parameter set - self.mock_param_set = MagicMock() - self.mock_param_set.appArgs = [MagicMock(name="Main Script")] + # Mock parameter set with properly named appArgs + self.mock_param_set = Mock() + self.mock_param_set.appArgs = [_make_app_arg("Main Script")] self.mock_param_set.envVariables = [] self.mock_param_set.schedulerOptions = [] self.mock_job_attrs.parameterSet = self.mock_param_set + # Mock fileInputs so input directory detection works + input_dir_fi = Mock() + input_dir_fi.name = "Input Directory" + input_dir_fi.targetPath = None + input_dir_fi.autoMountLocal = True + self.mock_job_attrs.fileInputs = [input_dir_fi] + self.mock_app.jobAttributes = self.mock_job_attrs @patch("dapi.jobs.get_app_details") diff --git a/tests/jobs/test_dir_uri.py b/tests/jobs/test_dir_uri.py index 4e914ac..fd8a56a 
100644 --- a/tests/jobs/test_dir_uri.py +++ b/tests/jobs/test_dir_uri.py @@ -1,18 +1,20 @@ import unittest -from unittest.mock import MagicMock +from unittest.mock import MagicMock, Mock from dapi.files import get_ds_path_uri -from tapipy.tapis import Tapis class TestGetDsPathUri(unittest.TestCase): def setUp(self): - # Mocking the Tapis object - self.t = MagicMock(spec=Tapis) + # Use MagicMock without spec so dynamic attributes like .systems work + self.t = MagicMock() self.t.username = "testuser" - # Correctly mocking the get method - self.t.get = MagicMock() - self.t.get.return_value.json.return_value = {"baseProject": {"uuid": "12345"}} + # Mock the systems.getSystems call for project path lookups + # Return a single matching system for any project query + mock_system = Mock() + mock_system.id = "project-12345" + mock_system.description = "ProjA ProjB project" + self.t.systems.getSystems.return_value = [mock_system] def test_directory_patterns(self): test_cases = [ diff --git a/tests/jobs/test_job_gen_jobinfo.py b/tests/jobs/test_job_gen_jobinfo.py index 582be02..8b2bd04 100644 --- a/tests/jobs/test_job_gen_jobinfo.py +++ b/tests/jobs/test_job_gen_jobinfo.py @@ -4,38 +4,86 @@ from datetime import datetime +def _make_app_arg(name, arg="", input_mode="INCLUDE_ON_DEMAND"): + """Create a mock app arg with a real .name attribute.""" + m = Mock() + m.name = name + m.arg = arg + m.inputMode = input_mode + return m + + +def _make_env_var(key, value="", input_mode="INCLUDE_ON_DEMAND"): + """Create a mock env var with a real .key attribute.""" + m = Mock() + m.key = key + m.value = value + m.inputMode = input_mode + return m + + +def _make_file_input(name, target_path=None, auto_mount=True): + """Create a mock file input definition.""" + m = Mock() + m.name = name + m.targetPath = target_path + m.autoMountLocal = auto_mount + return m + + class TestGenerateJobInfo(unittest.TestCase): def setUp(self): self.t_mock = Mock() self.app_name = "test-app" self.input_uri = 
"tapis://test-system/input/data" self.input_file = "input.txt" - # Mock the getAppLatestVersion method + + # Mock app details (returned by get_app_details) self.app_info_mock = Mock() self.app_info_mock.id = self.app_name self.app_info_mock.version = "1.0" + self.app_info_mock.description = "Test app" + + # Job attributes self.app_info_mock.jobAttributes.execSystemId = "test-exec-system" self.app_info_mock.jobAttributes.maxMinutes = 60 self.app_info_mock.jobAttributes.archiveOnAppError = True self.app_info_mock.jobAttributes.execSystemLogicalQueue = "normal" - self.t_mock.apps.getAppLatestVersion.return_value = self.app_info_mock + self.app_info_mock.jobAttributes.nodeCount = 1 + self.app_info_mock.jobAttributes.coresPerNode = 1 + self.app_info_mock.jobAttributes.memoryMB = None + self.app_info_mock.jobAttributes.isMpi = None + self.app_info_mock.jobAttributes.cmdPrefix = None + self.app_info_mock.jobAttributes.archiveSystemId = None + self.app_info_mock.jobAttributes.archiveSystemDir = None + + # Parameter set with proper mock objects + param_set = Mock() + param_set.appArgs = [_make_app_arg("Input Script")] + param_set.envVariables = [] + param_set.schedulerOptions = [] + self.app_info_mock.jobAttributes.parameterSet = param_set + # File inputs + self.app_info_mock.jobAttributes.fileInputs = [ + _make_file_input("Input Directory") + ] + + @patch("dapi.jobs.get_app_details") @patch("dapi.jobs.datetime") - def test_generate_job_info_default(self, mock_datetime): + def test_generate_job_info_default(self, mock_datetime, mock_get_app): mock_datetime.now.return_value = datetime(2023, 5, 1, 12, 0, 0) + mock_get_app.return_value = self.app_info_mock + result = generate_job_request( self.t_mock, self.app_name, self.input_uri, self.input_file ) - self.assertEqual(result["name"], f"{self.app_name}_20230501_120000") + self.assertEqual(result["name"], f"{self.app_name}-20230501_120000") self.assertEqual(result["appId"], self.app_name) self.assertEqual(result["appVersion"], 
"1.0") self.assertEqual(result["execSystemId"], "test-exec-system") self.assertEqual(result["maxMinutes"], 60) self.assertTrue(result["archiveOnAppError"]) - self.assertEqual( - result["fileInputs"], - [{"name": "Input Directory", "sourceUrl": self.input_uri}], - ) self.assertEqual(result["execSystemLogicalQueue"], "normal") self.assertEqual(result["nodeCount"], 1) self.assertEqual(result["coresPerNode"], 1) @@ -45,7 +93,10 @@ def test_generate_job_info_default(self, mock_datetime): ) self.assertNotIn("schedulerOptions", result["parameterSet"]) - def test_generate_job_info_custom(self): + @patch("dapi.jobs.get_app_details") + def test_generate_job_info_custom(self, mock_get_app): + mock_get_app.return_value = self.app_info_mock + custom_job_name = "custom-job" custom_max_minutes = 120 custom_node_count = 2 @@ -73,14 +124,49 @@ def test_generate_job_info_custom(self): [{"name": "TACC Allocation", "arg": f"-A {custom_allocation}"}], ) - def test_generate_job_info_invalid_app(self): - self.t_mock.apps.getAppLatestVersion.side_effect = Exception("Invalid app") + @patch("dapi.jobs.get_app_details") + def test_generate_job_info_invalid_app(self, mock_get_app): + mock_get_app.side_effect = Exception("Invalid app") with self.assertRaises(Exception): generate_job_request(self.t_mock, "invalid-app", self.input_uri) - def test_generate_job_info_opensees(self): + @patch("dapi.jobs.get_app_details") + def test_generate_job_info_opensees(self, mock_get_app): opensees_app_name = "opensees-express" - result = generate_job_request(self.t_mock, opensees_app_name, self.input_uri) + + # Create a separate app mock for opensees with envVariables containing tclScript + opensees_app = Mock() + opensees_app.id = opensees_app_name + opensees_app.version = "1.0" + opensees_app.description = "OpenSees app" + opensees_app.jobAttributes.execSystemId = "test-exec-system" + opensees_app.jobAttributes.maxMinutes = 60 + opensees_app.jobAttributes.archiveOnAppError = True + 
opensees_app.jobAttributes.execSystemLogicalQueue = "normal" + opensees_app.jobAttributes.nodeCount = 1 + opensees_app.jobAttributes.coresPerNode = 1 + opensees_app.jobAttributes.memoryMB = None + opensees_app.jobAttributes.isMpi = None + opensees_app.jobAttributes.cmdPrefix = None + opensees_app.jobAttributes.archiveSystemId = None + opensees_app.jobAttributes.archiveSystemDir = None + + # OpenSees uses envVariables for the script, not appArgs + param_set = Mock() + param_set.appArgs = [] + param_set.envVariables = [_make_env_var("tclScript")] + param_set.schedulerOptions = [] + opensees_app.jobAttributes.parameterSet = param_set + opensees_app.jobAttributes.fileInputs = [_make_file_input("Input Directory")] + + mock_get_app.return_value = opensees_app + + result = generate_job_request( + self.t_mock, + opensees_app_name, + self.input_uri, + script_filename=self.input_file, + ) self.assertIn("parameterSet", result) self.assertIn("envVariables", result["parameterSet"]) self.assertEqual( diff --git a/tests/jobs/test_job_status.py b/tests/jobs/test_job_status.py index 4960ad8..9e70c3b 100644 --- a/tests/jobs/test_job_status.py +++ b/tests/jobs/test_job_status.py @@ -1,58 +1,46 @@ import unittest -from unittest.mock import Mock, patch +from unittest.mock import Mock, patch, MagicMock import dapi as ds class TestGetStatus(unittest.TestCase): - @patch("time.sleep", Mock()) # Mocks the sleep function - def test_get_status(self): - # Mock the Tapis client object + @patch("dapi.jobs.SubmittedJob") + def test_get_status(self, mock_submitted_job_cls): mock_tapis = Mock() - # Define behavior for getJobStatus method - mock_tapis.jobs.getJobStatus.side_effect = [ - Mock(status="PENDING"), - Mock(status="PENDING"), - Mock(status="RUNNING"), - Mock(status="RUNNING"), - Mock(status="FINISHED"), - ] + # Set up the mock SubmittedJob instance + mock_job_instance = MagicMock() + mock_job_instance.get_status.return_value = "FINISHED" + mock_submitted_job_cls.return_value = 
mock_job_instance - # Define behavior for getJob method - mock_tapis.jobs.getJob.return_value = Mock(maxMinutes=1) - - # Call get_status - status = ds.jobs.get_status(mock_tapis, "some_job_uuid", tlapse=1) + # Call get_job_status (no tlapse parameter) + status = ds.jobs.get_job_status(mock_tapis, "some_job_uuid") # Assert that the final status is "FINISHED" self.assertEqual(status, "FINISHED") - # Assert the methods were called the expected number of times - mock_tapis.jobs.getJobStatus.assert_called_with(jobUuid="some_job_uuid") - self.assertEqual(mock_tapis.jobs.getJobStatus.call_count, 5) - mock_tapis.jobs.getJob.assert_called_once_with(jobUuid="some_job_uuid") + # Assert SubmittedJob was created with the right arguments + mock_submitted_job_cls.assert_called_once_with(mock_tapis, "some_job_uuid") + mock_job_instance.get_status.assert_called_once_with(force_refresh=True) - @patch("time.sleep", Mock()) - def test_get_status_timeout(self): - # Mock the Tapis client object + @patch("dapi.jobs.SubmittedJob") + def test_get_status_running(self, mock_submitted_job_cls): mock_tapis = Mock() - # Define behavior for getJobStatus method to simulate a job that doesn't finish - mock_tapis.jobs.getJobStatus.return_value = Mock(status="RUNNING") - - # Define behavior for getJob method - mock_tapis.jobs.getJob.return_value = Mock(maxMinutes=1) + # Set up the mock SubmittedJob instance to return RUNNING + mock_job_instance = MagicMock() + mock_job_instance.get_status.return_value = "RUNNING" + mock_submitted_job_cls.return_value = mock_job_instance - # Call get_status - status = ds.jobs.get_status(mock_tapis, "some_job_uuid", tlapse=1) + # Call get_job_status + status = ds.jobs.get_job_status(mock_tapis, "some_job_uuid") - # Assert that the final status is still "RUNNING" due to timeout + # Assert that the status is "RUNNING" self.assertEqual(status, "RUNNING") - # Assert the methods were called the expected number of times - expected_calls = 60 # 1 minute = 60 seconds, with 
tlapse=1 - self.assertGreaterEqual(mock_tapis.jobs.getJobStatus.call_count, expected_calls) - mock_tapis.jobs.getJob.assert_called_once_with(jobUuid="some_job_uuid") + # Assert the SubmittedJob was created correctly + mock_submitted_job_cls.assert_called_once_with(mock_tapis, "some_job_uuid") + mock_job_instance.get_status.assert_called_once_with(force_refresh=True) if __name__ == "__main__": diff --git a/tests/jobs/test_runtime_summary.py b/tests/jobs/test_runtime_summary.py index c35048a..5bcfaca 100644 --- a/tests/jobs/test_runtime_summary.py +++ b/tests/jobs/test_runtime_summary.py @@ -1,10 +1,11 @@ import unittest -from unittest.mock import Mock +from unittest.mock import Mock, patch from io import StringIO import sys from datetime import datetime, timedelta import dapi as ds +from dapi.jobs import SubmittedJob class TestRuntimeSummary(unittest.TestCase): @@ -95,7 +96,18 @@ def capture_output(self, t_mock, job_id, verbose): try: out = StringIO() sys.stdout = out - ds.jobs.runtime_summary(t_mock, job_id, verbose) + # Patch SubmittedJob.__init__ to skip the isinstance check + with patch.object( + SubmittedJob, + "__init__", + lambda self, tc, ju: ( + setattr(self, "_tapis", tc) + or setattr(self, "uuid", ju) + or setattr(self, "_last_status", None) + or setattr(self, "_job_details", None) + ), + ): + ds.jobs.get_runtime_summary(t_mock, job_id, verbose) return out.getvalue().strip() finally: sys.stdout = saved_stdout From 5b687342d2da3ad5116cf4dd07589c22f25275df Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Thu, 19 Mar 2026 13:07:41 -0500 Subject: [PATCH 16/21] Fix archive system --- dapi/client.py | 13 ++- docs/examples/pylauncher.md | 6 +- docs/jobs.md | 10 +- examples/pylauncher_sweep.ipynb | 176 ++++---------------------------- 4 files changed, 46 insertions(+), 159 deletions(-) diff --git a/dapi/client.py b/dapi/client.py index 86e9325..bb0bf66 100644 --- a/dapi/client.py +++ b/dapi/client.py @@ -425,6 +425,8 @@ def submit( cores_per_node: Optional[int] 
= None, max_minutes: Optional[int] = None, queue: Optional[str] = None, + archive_system: Optional[str] = "designsafe", + archive_path: Optional[str] = None, **kwargs, ): """Submit a PyLauncher sweep job. @@ -432,16 +434,23 @@ def submit( Translates *directory* to a Tapis URI, builds a job request with ``call_pylauncher.py`` as the script, and submits it. + Archives to the user's DesignSafe storage by default (not the + app's archive path, which may belong to the app owner). + Args: directory: Path to the input directory containing ``runsList.txt`` and ``call_pylauncher.py`` (e.g. ``"/MyData/sweep/"``). - app_id: Tapis application ID (e.g. ``"openseespy-s3"``). + app_id: Tapis application ID (e.g. ``"designsafe-agnostic-app"``). allocation: TACC allocation to charge. node_count: Number of compute nodes. cores_per_node: Cores per node. max_minutes: Maximum runtime in minutes. queue: Execution queue name. + archive_system: Archive system. Defaults to ``"designsafe"`` + (the user's own storage). + archive_path: Archive directory path. If None, uses the + default ``tapis-jobs-archive/`` under the user's MyData. **kwargs: Additional arguments passed to ``ds.jobs.generate()``. 
@@ -459,6 +468,8 @@ def submit( max_minutes=max_minutes, queue=queue, allocation=allocation, + archive_system=archive_system, + archive_path=archive_path, **kwargs, ) return jobs_module.submit_job_request(self._tapis, job_request) diff --git a/docs/examples/pylauncher.md b/docs/examples/pylauncher.md index 6257a56..5b6fba3 100644 --- a/docs/examples/pylauncher.md +++ b/docs/examples/pylauncher.md @@ -59,7 +59,7 @@ ds.jobs.parametric_sweep.generate( ```python job = ds.jobs.parametric_sweep.submit( "/MyData/pylauncher_demo/", - app_id="agnostic", + app_id="designsafe-agnostic-app", allocation="your_allocation", node_count=1, cores_per_node=48, @@ -104,7 +104,7 @@ ds.jobs.parametric_sweep.generate( job = ds.jobs.parametric_sweep.submit( "/MyData/opensees_sweep/", - app_id="openseespy-s3", + app_id="designsafe-agnostic-app", allocation="your_allocation", node_count=2, cores_per_node=48, @@ -127,5 +127,5 @@ $WORK/sweep_$SLURM_JOB_ID/run_ALPHA_BETA ## Notes - **PyLauncher is NOT a dapi dependency** — it's pre-installed on TACC compute nodes. dapi only generates the input files. -- **MPI is disabled** — PyLauncher's `ClassicLauncher` runs independent serial tasks. The apps used (`agnostic`, `openseespy-s3`) already have `isMpi: false`. +- **MPI is disabled** — PyLauncher's `ClassicLauncher` runs independent serial tasks. The `designsafe-agnostic-app` already has `isMpi: false`. - **Works with any app** — OpenSees, Python, MATLAB, Fortran binaries. The task list is just shell commands. 
diff --git a/docs/jobs.md b/docs/jobs.md index 277dc8c..d380e13 100644 --- a/docs/jobs.md +++ b/docs/jobs.md @@ -95,12 +95,20 @@ print(f"Default Cores: {app_details.jobAttributes.coresPerNode}") | Application | App ID | Description | |-------------|--------|-------------| +| Agnostic | `designsafe-agnostic-app` | General-purpose Python/OpenSees/PyLauncher execution | | MATLAB | `matlab-r2023a` | MATLAB computational environment | | OpenSees | `opensees-express` | Structural analysis framework | +| OpenSees MP | `opensees-mp-s3` | OpenSees parallel (MPI) analysis | | MPM | `mpm-s3` | Material Point Method simulations | | ADCIRC | `adcirc-v55` | Coastal circulation modeling | | LS-DYNA | `ls-dyna` | Explicit finite element analysis | +The **Agnostic App** (`designsafe-agnostic-app`) is DesignSafe's general-purpose app for running Python scripts, OpenSeesPy, and PyLauncher parameter sweeps on TACC systems. It supports: +- Python 3.12 with OpenSeesPy pre-installed +- PyLauncher for running many independent tasks in a single allocation +- Configurable TACC module loading +- Serial execution (`isMpi: false`) — ideal for PyLauncher workflows + ## Job Submission ### Basic Job Submission @@ -461,7 +469,7 @@ ds.jobs.parametric_sweep.generate( # Submit the job job = ds.jobs.parametric_sweep.submit( "/MyData/sweep_demo/", - app_id="agnostic", + app_id="designsafe-agnostic-app", allocation="your_allocation", node_count=1, cores_per_node=48, diff --git a/examples/pylauncher_sweep.ipynb b/examples/pylauncher_sweep.ipynb index f51b384..35e65de 100644 --- a/examples/pylauncher_sweep.ipynb +++ b/examples/pylauncher_sweep.ipynb @@ -3,20 +3,7 @@ { "cell_type": "markdown", "metadata": {}, - "source": [ - "# PyLauncher Parameter Sweeps with dapi\n", - "\n", - "This notebook demonstrates how to use dapi's parameter sweep utilities to generate\n", - "PyLauncher task lists and submit sweep jobs on DesignSafe.\n", - "\n", - "**PyLauncher** runs many independent serial tasks within a single 
SLURM allocation —\n", - "ideal for parameter studies, Monte Carlo simulations, and batch processing.\n", - "\n", - "**What this notebook covers:**\n", - "\n", - "1. **Generic demo** — a minimal `simulate.py` with `--alpha`/`--beta` parameters\n", - "2. **OpenSees demo** — Silvia Mazzoni's cantilever pushover with `--NodalMass`/`--LCol` sweep" - ] + "source": "# PyLauncher Parameter Sweeps with dapi\n\nThis notebook demonstrates how to use dapi's parameter sweep utilities to generate\nPyLauncher task lists and submit sweep jobs on DesignSafe.\n\n**PyLauncher** runs many independent serial tasks within a single SLURM allocation —\nideal for parameter studies, Monte Carlo simulations, and batch processing.\n\n**What this notebook covers:**\n\n1. **Generic demo** — a minimal `simulate.py` with `--alpha`/`--beta` parameters\n2. **OpenSees demo** — cantilever pushover with `--NodalMass`/`--LCol` sweep" }, { "cell_type": "code", @@ -34,9 +21,14 @@ "outputs": [], "source": [ "import os\n", + "from pathlib import Path\n", "from dapi import DSClient\n", "\n", - "ds = DSClient()" + "ds = DSClient()\n", + "\n", + "# On DesignSafe JupyterHub, ~/MyData is /home/jupyter/MyData.\n", + "# Locally, we use a local directory but the Tapis path stays /MyData/...\n", + "MYDATA = Path(os.environ.get(\"JUPYTER_SERVER_ROOT\", os.path.expanduser(\"~\"))) / \"MyData\"" ] }, { @@ -49,7 +41,6 @@ "\n", "A simple example sweeping over two parameters (`--alpha`, `--beta`). 
The script\n", "computes `result = alpha * beta`, writes it to a JSON output file, and prints a summary.\n", - "This pattern works with any app — the commands in `runsList.txt` are just shell commands.\n", "\n", "### Write the script" ] @@ -60,8 +51,8 @@ "metadata": {}, "outputs": [], "source": [ - "input_dir_generic = os.path.expanduser(\"~/MyData/pylauncher_demo/\")\n", - "os.makedirs(input_dir_generic, exist_ok=True)\n", + "input_dir_generic = MYDATA / \"pylauncher_demo\"\n", + "input_dir_generic.mkdir(parents=True, exist_ok=True)\n", "\n", "simulate_script = '''\\\n", "\"\"\"simulate.py — minimal demo script for PyLauncher parameter sweeps.\n", @@ -91,10 +82,8 @@ "print(f\"alpha={args.alpha}, beta={args.beta} -> result={result:.4f} written to {outfile}\")\n", "'''\n", "\n", - "with open(os.path.join(input_dir_generic, \"simulate.py\"), \"w\") as f:\n", - " f.write(simulate_script)\n", - "\n", - "print(f\"Wrote {input_dir_generic}simulate.py\")" + "(input_dir_generic / \"simulate.py\").write_text(simulate_script)\n", + "print(f\"Wrote {input_dir_generic}/simulate.py\")" ] }, { @@ -152,18 +141,16 @@ "commands = ds.jobs.parametric_sweep.generate(\n", " \"python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA\",\n", " sweep,\n", - " input_dir_generic,\n", + " str(input_dir_generic),\n", " debug=\"host+job\",\n", ")\n", "\n", "print(f\"Generated {len(commands)} task commands\\n\")\n", "print(\"=== runsList.txt ===\")\n", - "with open(os.path.join(input_dir_generic, \"runsList.txt\")) as f:\n", - " print(f.read())\n", + "print((input_dir_generic / \"runsList.txt\").read_text())\n", "\n", "print(\"=== call_pylauncher.py ===\")\n", - "with open(os.path.join(input_dir_generic, \"call_pylauncher.py\")) as f:\n", - " print(f.read())\n", + "print((input_dir_generic / \"call_pylauncher.py\").read_text())\n", "\n", "print(\"=== Files in input directory ===\")\n", "for fn in sorted(os.listdir(input_dir_generic)):\n", @@ -187,7 +174,7 @@ "source": [ "# job = 
ds.jobs.parametric_sweep.submit(\n", "# \"/MyData/pylauncher_demo/\",\n", - "# app_id=\"agnostic\",\n", + "# app_id=\"designsafe-agnostic-app\",\n", "# allocation=\"your_allocation\",\n", "# node_count=1,\n", "# cores_per_node=48,\n", @@ -199,131 +186,14 @@ { "cell_type": "markdown", "metadata": {}, - "source": [ - "---\n", - "\n", - "## Part 2: OpenSees Cantilever Pushover Sweep\n", - "\n", - "A real-world example based on Silvia Mazzoni's cantilever pushover analysis.\n", - "We sweep over `NodalMass` and `LCol` (column length) to study how these structural\n", - "parameters affect the pushover response.\n", - "\n", - "The cantilever model:\n", - "```\n", - " ^Y\n", - " |\n", - " 2 __\n", - " | |\n", - " | |\n", - " | |\n", - " (1) LCol\n", - " | |\n", - " | |\n", - " | |\n", - " =1= ---- -------->X\n", - "```\n", - "\n", - "- Node 1: fixed base\n", - "- Node 2: free top with `NodalMass`\n", - "- Elastic beam-column element\n", - "- Gravity load (2000 kip downward) followed by lateral pushover (displacement-controlled)\n", - "\n", - "### Write the analysis script\n", - "\n", - "This is the OpenSeesPy cantilever pushover script adapted from\n", - "[Silvia Mazzoni's example](https://opensees.berkeley.edu/wiki/index.php/Examples_Manual).\n", - "It accepts `--NodalMass`, `--LCol`, and `--outDir` as command-line arguments\n", - "so PyLauncher can run each parameter combination independently." 
- ] + "source": "---\n\n## Part 2: OpenSees Cantilever Pushover Sweep\n\nA real-world example using a 2D cantilever pushover analysis.\nWe sweep over `NodalMass` and `LCol` (column length) to study how these structural\nparameters affect the pushover response.\n\nThe cantilever model:\n```\n ^Y\n |\n 2 __\n | |\n | |\n | |\n (1) LCol\n | |\n | |\n | |\n =1= ---- -------->X\n```\n\n- Node 1: fixed base\n- Node 2: free top with `NodalMass`\n- Elastic beam-column element\n- Gravity load (2000 kip downward) followed by lateral pushover (displacement-controlled)\n\n### Write the analysis script\n\nAn OpenSeesPy cantilever pushover script based on the\n[OpenSees Examples Manual](https://opensees.berkeley.edu/wiki/index.php/Examples_Manual).\nIt accepts `--NodalMass`, `--LCol`, and `--outDir` as command-line arguments\nso PyLauncher can run each parameter combination independently." }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "input_dir_opensees = os.path.expanduser(\"~/MyData/opensees_sweep/\")\n", - "os.makedirs(input_dir_opensees, exist_ok=True)\n", - "\n", - "cantilever_script = \"\"\"\\\n", - "# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n", - "# Adapted from Silvia Mazzoni & Frank McKenna, 2006/2020\n", - "# Units: kip, inch, second\n", - "#\n", - "# Command-line arguments (set by PyLauncher per task):\n", - "# --NodalMass mass at free node\n", - "# --LCol column length\n", - "# --outDir output directory for this run\n", - "\n", - "import argparse\n", - "import os\n", - "\n", - "if os.path.exists(\"opensees.so\"):\n", - " import opensees as ops\n", - "else:\n", - " import openseespy.opensees as ops\n", - "\n", - "parser = argparse.ArgumentParser()\n", - "parser.add_argument(\"--NodalMass\", type=float, required=True)\n", - "parser.add_argument(\"--LCol\", type=float, required=True)\n", - "parser.add_argument(\"--outDir\", type=str, required=True)\n", - "args = parser.parse_args()\n", - "\n", - "NodalMass 
= args.NodalMass\n", - "LCol = args.LCol\n", - "outDir = args.outDir\n", - "\n", - "os.makedirs(outDir, exist_ok=True)\n", - "print(f\"Running: NodalMass={NodalMass}, LCol={LCol}, outDir={outDir}\")\n", - "\n", - "ops.wipe()\n", - "ops.model(\"basic\", \"-ndm\", 2, \"-ndf\", 3)\n", - "\n", - "# Geometry\n", - "ops.node(1, 0, 0)\n", - "ops.node(2, 0, LCol)\n", - "ops.fix(1, 1, 1, 1)\n", - "ops.mass(2, NodalMass, 0.0, 0.0)\n", - "\n", - "# Element\n", - "ops.geomTransf(\"Linear\", 1)\n", - "ops.element(\"elasticBeamColumn\", 1, 1, 2, 3600000000, 4227, 1080000, 1)\n", - "\n", - "# Recorders\n", - "ops.recorder(\"Node\", \"-file\", f\"{outDir}/DFree.out\", \"-time\", \"-node\", 2, \"-dof\", 1, 2, 3, \"disp\")\n", - "ops.recorder(\"Node\", \"-file\", f\"{outDir}/RBase.out\", \"-time\", \"-node\", 1, \"-dof\", 1, 2, 3, \"reaction\")\n", - "ops.recorder(\"Element\", \"-file\", f\"{outDir}/FCol.out\", \"-time\", \"-ele\", 1, \"globalForce\")\n", - "\n", - "# Gravity analysis\n", - "ops.timeSeries(\"Linear\", 1)\n", - "ops.pattern(\"Plain\", 1, 1)\n", - "ops.load(2, 0.0, -2000.0, 0.0)\n", - "ops.wipeAnalysis()\n", - "ops.constraints(\"Plain\")\n", - "ops.numberer(\"Plain\")\n", - "ops.system(\"BandGeneral\")\n", - "ops.test(\"NormDispIncr\", 1.0e-8, 6)\n", - "ops.algorithm(\"Newton\")\n", - "ops.integrator(\"LoadControl\", 0.1)\n", - "ops.analysis(\"Static\")\n", - "ops.analyze(10)\n", - "ops.loadConst(\"-time\", 0.0)\n", - "\n", - "# Pushover analysis\n", - "ops.timeSeries(\"Linear\", 2)\n", - "ops.pattern(\"Plain\", 2, 2)\n", - "ops.load(2, 2000.0, 0.0, 0.0)\n", - "ops.integrator(\"DisplacementControl\", 2, 1, 0.1)\n", - "ops.analyze(1000)\n", - "\n", - "print(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n", - "\"\"\"\n", - "\n", - "with open(os.path.join(input_dir_opensees, \"cantilever.py\"), \"w\") as f:\n", - " f.write(cantilever_script)\n", - "\n", - "print(f\"Wrote {input_dir_opensees}cantilever.py\")" - ] + "source": "input_dir_opensees = MYDATA / 
\"opensees_sweep\"\ninput_dir_opensees.mkdir(parents=True, exist_ok=True)\n\ncantilever_script = '''\\\n# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n# Based on the OpenSees Examples Manual\n# Units: kip, inch, second\n#\n# Command-line arguments (set by PyLauncher per task):\n# --NodalMass mass at free node\n# --LCol column length\n# --outDir output directory for this run\n\nimport argparse\nimport os\n\nif os.path.exists(\"opensees.so\"):\n import opensees as ops\nelse:\n import openseespy.opensees as ops\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--NodalMass\", type=float, required=True)\nparser.add_argument(\"--LCol\", type=float, required=True)\nparser.add_argument(\"--outDir\", type=str, required=True)\nargs = parser.parse_args()\n\nNodalMass = args.NodalMass\nLCol = args.LCol\noutDir = args.outDir\n\nos.makedirs(outDir, exist_ok=True)\nprint(f\"Running: NodalMass={NodalMass}, LCol={LCol}, outDir={outDir}\")\n\nops.wipe()\nops.model(\"basic\", \"-ndm\", 2, \"-ndf\", 3)\n\n# Geometry\nops.node(1, 0, 0)\nops.node(2, 0, LCol)\nops.fix(1, 1, 1, 1)\nops.mass(2, NodalMass, 0.0, 0.0)\n\n# Element\nops.geomTransf(\"Linear\", 1)\nops.element(\"elasticBeamColumn\", 1, 1, 2, 3600000000, 4227, 1080000, 1)\n\n# Recorders\nops.recorder(\"Node\", \"-file\", f\"{outDir}/DFree.out\", \"-time\", \"-node\", 2, \"-dof\", 1, 2, 3, \"disp\")\nops.recorder(\"Node\", \"-file\", f\"{outDir}/RBase.out\", \"-time\", \"-node\", 1, \"-dof\", 1, 2, 3, \"reaction\")\nops.recorder(\"Element\", \"-file\", f\"{outDir}/FCol.out\", \"-time\", \"-ele\", 1, \"globalForce\")\n\n# Gravity analysis\nops.timeSeries(\"Linear\", 1)\nops.pattern(\"Plain\", 1, 1)\nops.load(2, 0.0, -2000.0, 0.0)\nops.wipeAnalysis()\nops.constraints(\"Plain\")\nops.numberer(\"Plain\")\nops.system(\"BandGeneral\")\nops.test(\"NormDispIncr\", 1.0e-8, 6)\nops.algorithm(\"Newton\")\nops.integrator(\"LoadControl\", 0.1)\nops.analysis(\"Static\")\nops.analyze(10)\nops.loadConst(\"-time\", 0.0)\n\n# 
Pushover analysis\nops.timeSeries(\"Linear\", 2)\nops.pattern(\"Plain\", 2, 2)\nops.load(2, 2000.0, 0.0, 0.0)\nops.integrator(\"DisplacementControl\", 2, 1, 0.1)\nops.analyze(1000)\n\nprint(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n'''\n\n(input_dir_opensees / \"cantilever.py\").write_text(cantilever_script)\nprint(f\"Wrote {input_dir_opensees}/cantilever.py\")" }, { "cell_type": "markdown", @@ -382,17 +252,15 @@ "commands = ds.jobs.parametric_sweep.generate(\n", " \"python3 cantilever.py --NodalMass NODAL_MASS --LCol LCOL --outDir out_NODAL_MASS_LCOL\",\n", " sweep_opensees,\n", - " input_dir_opensees,\n", + " str(input_dir_opensees),\n", ")\n", "\n", "print(f\"Generated {len(commands)} task commands\\n\")\n", "print(\"=== runsList.txt ===\")\n", - "with open(os.path.join(input_dir_opensees, \"runsList.txt\")) as f:\n", - " print(f.read())\n", + "print((input_dir_opensees / \"runsList.txt\").read_text())\n", "\n", "print(\"=== call_pylauncher.py ===\")\n", - "with open(os.path.join(input_dir_opensees, \"call_pylauncher.py\")) as f:\n", - " print(f.read())\n", + "print((input_dir_opensees / \"call_pylauncher.py\").read_text())\n", "\n", "print(\"=== Files in input directory ===\")\n", "for fn in sorted(os.listdir(input_dir_opensees)):\n", @@ -416,7 +284,7 @@ "source": [ "# job = ds.jobs.parametric_sweep.submit(\n", "# \"/MyData/opensees_sweep/\",\n", - "# app_id=\"openseespy-s3\",\n", + "# app_id=\"designsafe-agnostic-app\",\n", "# allocation=\"your_allocation\",\n", "# node_count=1,\n", "# cores_per_node=48,\n", @@ -439,4 +307,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file From dcd3af976eb2790c76ea993240341386e6144d93 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Thu, 19 Mar 2026 15:12:19 -0500 Subject: [PATCH 17/21] Version update to 0.5.0 --- dapi/__init__.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dapi/__init__.py b/dapi/__init__.py index 74eef04..0f03deb 100644 --- 
a/dapi/__init__.py +++ b/dapi/__init__.py @@ -79,7 +79,7 @@ TAPIS_TERMINAL_STATES, ) -__version__ = "0.4.5" +__version__ = "0.5.0" __all__ = [ "DSClient", diff --git a/pyproject.toml b/pyproject.toml index c052b51..671ee79 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dapi" -version = "0.4.9" +version = "0.5.0" description = "DesignSafe API" readme = "README.md" license = "MIT" From c4440f65f6f69bd47bed46affccf2052faedf312 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Fri, 20 Mar 2026 05:03:23 -0500 Subject: [PATCH 18/21] Ruff format pylauncher notebook --- examples/pylauncher_sweep.ipynb | 82 ++++++++++++++++++++++++++++++++- 1 file changed, 81 insertions(+), 1 deletion(-) diff --git a/examples/pylauncher_sweep.ipynb b/examples/pylauncher_sweep.ipynb index 35e65de..7a8156e 100644 --- a/examples/pylauncher_sweep.ipynb +++ b/examples/pylauncher_sweep.ipynb @@ -193,7 +193,87 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "input_dir_opensees = MYDATA / \"opensees_sweep\"\ninput_dir_opensees.mkdir(parents=True, exist_ok=True)\n\ncantilever_script = '''\\\n# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n# Based on the OpenSees Examples Manual\n# Units: kip, inch, second\n#\n# Command-line arguments (set by PyLauncher per task):\n# --NodalMass mass at free node\n# --LCol column length\n# --outDir output directory for this run\n\nimport argparse\nimport os\n\nif os.path.exists(\"opensees.so\"):\n import opensees as ops\nelse:\n import openseespy.opensees as ops\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--NodalMass\", type=float, required=True)\nparser.add_argument(\"--LCol\", type=float, required=True)\nparser.add_argument(\"--outDir\", type=str, required=True)\nargs = parser.parse_args()\n\nNodalMass = args.NodalMass\nLCol = args.LCol\noutDir = args.outDir\n\nos.makedirs(outDir, exist_ok=True)\nprint(f\"Running: NodalMass={NodalMass}, LCol={LCol}, 
outDir={outDir}\")\n\nops.wipe()\nops.model(\"basic\", \"-ndm\", 2, \"-ndf\", 3)\n\n# Geometry\nops.node(1, 0, 0)\nops.node(2, 0, LCol)\nops.fix(1, 1, 1, 1)\nops.mass(2, NodalMass, 0.0, 0.0)\n\n# Element\nops.geomTransf(\"Linear\", 1)\nops.element(\"elasticBeamColumn\", 1, 1, 2, 3600000000, 4227, 1080000, 1)\n\n# Recorders\nops.recorder(\"Node\", \"-file\", f\"{outDir}/DFree.out\", \"-time\", \"-node\", 2, \"-dof\", 1, 2, 3, \"disp\")\nops.recorder(\"Node\", \"-file\", f\"{outDir}/RBase.out\", \"-time\", \"-node\", 1, \"-dof\", 1, 2, 3, \"reaction\")\nops.recorder(\"Element\", \"-file\", f\"{outDir}/FCol.out\", \"-time\", \"-ele\", 1, \"globalForce\")\n\n# Gravity analysis\nops.timeSeries(\"Linear\", 1)\nops.pattern(\"Plain\", 1, 1)\nops.load(2, 0.0, -2000.0, 0.0)\nops.wipeAnalysis()\nops.constraints(\"Plain\")\nops.numberer(\"Plain\")\nops.system(\"BandGeneral\")\nops.test(\"NormDispIncr\", 1.0e-8, 6)\nops.algorithm(\"Newton\")\nops.integrator(\"LoadControl\", 0.1)\nops.analysis(\"Static\")\nops.analyze(10)\nops.loadConst(\"-time\", 0.0)\n\n# Pushover analysis\nops.timeSeries(\"Linear\", 2)\nops.pattern(\"Plain\", 2, 2)\nops.load(2, 2000.0, 0.0, 0.0)\nops.integrator(\"DisplacementControl\", 2, 1, 0.1)\nops.analyze(1000)\n\nprint(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n'''\n\n(input_dir_opensees / \"cantilever.py\").write_text(cantilever_script)\nprint(f\"Wrote {input_dir_opensees}/cantilever.py\")" + "source": [ + "input_dir_opensees = MYDATA / \"opensees_sweep\"\n", + "input_dir_opensees.mkdir(parents=True, exist_ok=True)\n", + "\n", + "cantilever_script = \"\"\"\\\n", + "# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n", + "# Based on the OpenSees Examples Manual\n", + "# Units: kip, inch, second\n", + "#\n", + "# Command-line arguments (set by PyLauncher per task):\n", + "# --NodalMass mass at free node\n", + "# --LCol column length\n", + "# --outDir output directory for this run\n", + "\n", + "import argparse\n", + "import os\n", + "\n", + "if 
os.path.exists(\"opensees.so\"):\n", + " import opensees as ops\n", + "else:\n", + " import openseespy.opensees as ops\n", + "\n", + "parser = argparse.ArgumentParser()\n", + "parser.add_argument(\"--NodalMass\", type=float, required=True)\n", + "parser.add_argument(\"--LCol\", type=float, required=True)\n", + "parser.add_argument(\"--outDir\", type=str, required=True)\n", + "args = parser.parse_args()\n", + "\n", + "NodalMass = args.NodalMass\n", + "LCol = args.LCol\n", + "outDir = args.outDir\n", + "\n", + "os.makedirs(outDir, exist_ok=True)\n", + "print(f\"Running: NodalMass={NodalMass}, LCol={LCol}, outDir={outDir}\")\n", + "\n", + "ops.wipe()\n", + "ops.model(\"basic\", \"-ndm\", 2, \"-ndf\", 3)\n", + "\n", + "# Geometry\n", + "ops.node(1, 0, 0)\n", + "ops.node(2, 0, LCol)\n", + "ops.fix(1, 1, 1, 1)\n", + "ops.mass(2, NodalMass, 0.0, 0.0)\n", + "\n", + "# Element\n", + "ops.geomTransf(\"Linear\", 1)\n", + "ops.element(\"elasticBeamColumn\", 1, 1, 2, 3600000000, 4227, 1080000, 1)\n", + "\n", + "# Recorders\n", + "ops.recorder(\"Node\", \"-file\", f\"{outDir}/DFree.out\", \"-time\", \"-node\", 2, \"-dof\", 1, 2, 3, \"disp\")\n", + "ops.recorder(\"Node\", \"-file\", f\"{outDir}/RBase.out\", \"-time\", \"-node\", 1, \"-dof\", 1, 2, 3, \"reaction\")\n", + "ops.recorder(\"Element\", \"-file\", f\"{outDir}/FCol.out\", \"-time\", \"-ele\", 1, \"globalForce\")\n", + "\n", + "# Gravity analysis\n", + "ops.timeSeries(\"Linear\", 1)\n", + "ops.pattern(\"Plain\", 1, 1)\n", + "ops.load(2, 0.0, -2000.0, 0.0)\n", + "ops.wipeAnalysis()\n", + "ops.constraints(\"Plain\")\n", + "ops.numberer(\"Plain\")\n", + "ops.system(\"BandGeneral\")\n", + "ops.test(\"NormDispIncr\", 1.0e-8, 6)\n", + "ops.algorithm(\"Newton\")\n", + "ops.integrator(\"LoadControl\", 0.1)\n", + "ops.analysis(\"Static\")\n", + "ops.analyze(10)\n", + "ops.loadConst(\"-time\", 0.0)\n", + "\n", + "# Pushover analysis\n", + "ops.timeSeries(\"Linear\", 2)\n", + "ops.pattern(\"Plain\", 2, 2)\n", + "ops.load(2, 2000.0, 
0.0, 0.0)\n", + "ops.integrator(\"DisplacementControl\", 2, 1, 0.1)\n", + "ops.analyze(1000)\n", + "\n", + "print(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n", + "\"\"\"\n", + "\n", + "(input_dir_opensees / \"cantilever.py\").write_text(cantilever_script)\n", + "print(f\"Wrote {input_dir_opensees}/cantilever.py\")" + ] }, { "cell_type": "markdown", From 96165d8600c45913d860b7637fa3b94e4e0faf20 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Sun, 22 Mar 2026 09:01:30 -0500 Subject: [PATCH 19/21] Fixes #3 TMS support automatically on init client --- docs/examples.md | 2 +- docs/examples/pylauncher.md | 2 +- examples/apps.ipynb | 36 +- examples/db.ipynb | 36 +- examples/mpm/mpm-minimal.ipynb | 46 +-- examples/mpm/mpm.ipynb | 75 +--- examples/openfoam/openfoam-minimal.ipynb | 42 +- examples/openfoam/openfoam.ipynb | 32 +- examples/opensees/OpenSeesMP-dapi.ipynb | 45 +- examples/pylauncher/pylauncher_opensees.ipynb | 273 ++++++++++++ examples/pylauncher/pylauncher_sweep.ipynb | 206 +++++++++ examples/pylauncher_sweep.ipynb | 390 ------------------ 12 files changed, 490 insertions(+), 695 deletions(-) create mode 100644 examples/pylauncher/pylauncher_opensees.ipynb create mode 100644 examples/pylauncher/pylauncher_sweep.ipynb delete mode 100644 examples/pylauncher_sweep.ipynb diff --git a/docs/examples.md b/docs/examples.md index b71ed5e..56a87c8 100644 --- a/docs/examples.md +++ b/docs/examples.md @@ -47,7 +47,7 @@ Submit and monitor MPM simulations for large deformation problems. ### PyLauncher Parameter Sweeps Run many independent tasks within a single SLURM allocation using PyLauncher. 
-[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/pylauncher_sweep.ipynb) +[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/pylauncher/pylauncher_sweep.ipynb) **What you'll learn:** - Generating parameter sweep commands from templates diff --git a/docs/examples/pylauncher.md b/docs/examples/pylauncher.md index 5b6fba3..c139042 100644 --- a/docs/examples/pylauncher.md +++ b/docs/examples/pylauncher.md @@ -2,7 +2,7 @@ Run many independent tasks within a single SLURM allocation using [PyLauncher](https://github.com/TACC/pylauncher) and dapi's parameter sweep utilities. -[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/pylauncher_sweep.ipynb) +[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/pylauncher/pylauncher_sweep.ipynb) ## When to Use PyLauncher diff --git a/examples/apps.ipynb b/examples/apps.ipynb index 72b4079..2632a38 100644 --- a/examples/apps.ipynb +++ b/examples/apps.ipynb @@ -22,39 +22,7 @@ "id": "839fa332-70a6-4818-a190-18c9ca109c28", "metadata": {}, "outputs": [], - "source": [ - "%pip install --user --no-cache-dir --force-reinstall --no-warn-script-location --no-deps --quiet dapi\n", - "%pip install --user --quiet setuptools\n", - "%pip install --user --no-warn-script-location --quiet dapi" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7bc3ba6c", - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "import os\n", - "\n", - "print(\n", - " \"Old NumPy 1.24 installed!\"\n", - " if 
sys.version_info < (3, 10)\n", - " and os.system(\"pip install --user --force-reinstall numpy~=1.24.0 --quiet\") == 0\n", - " else \"Skipped (Python 3.10+)\"\n", - " if sys.version_info >= (3, 10)\n", - " else \"Install failed!\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "bf12cbe6", - "metadata": {}, - "source": [ - "### ⚠️ Please restart the kernel by choosing Kernel >> Restart kernel ⚠️" - ] + "source": "%pip install dapi --quiet" }, { "cell_type": "code", @@ -327,4 +295,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/examples/db.ipynb b/examples/db.ipynb index b7e8053..90ce5b4 100644 --- a/examples/db.ipynb +++ b/examples/db.ipynb @@ -14,39 +14,7 @@ "id": "5a76c77b-0078-48fc-ade6-e46cbac010dd", "metadata": {}, "outputs": [], - "source": [ - "%pip install --user --no-cache-dir --force-reinstall --no-warn-script-location --no-deps --quiet dapi\n", - "%pip install --user --quiet setuptools\n", - "%pip install --user --no-warn-script-location --quiet dapi" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "da1802ad", - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "import os\n", - "\n", - "print(\n", - " \"Old NumPy 1.24 installed!\"\n", - " if sys.version_info < (3, 10)\n", - " and os.system(\"pip install --user --force-reinstall numpy~=1.24.0 --quiet\") == 0\n", - " else \"Skipped (Python 3.10+)\"\n", - " if sys.version_info >= (3, 10)\n", - " else \"Install failed!\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "c3ade4b2", - "metadata": {}, - "source": [ - "### ⚠️ Please restart the kernel by choosing Kernel >> Restart kernel ⚠️" - ] + "source": "%pip install dapi --quiet" }, { "cell_type": "code", @@ -208,4 +176,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/examples/mpm/mpm-minimal.ipynb b/examples/mpm/mpm-minimal.ipynb index 149c715..bc2713a 100644 --- a/examples/mpm/mpm-minimal.ipynb +++ 
b/examples/mpm/mpm-minimal.ipynb @@ -21,52 +21,8 @@ "execution_count": null, "id": "8ed2d128", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", - "poetry 2.1.3 requires importlib-metadata<8.7,>=4.4; python_version < \"3.10\", but you have importlib-metadata 8.7.0 which is incompatible.\u001b[0m\u001b[31m\n", - "\u001b[0m\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.1.1\u001b[0m\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n" - ] - } - ], - "source": [ - "%pip install --user --no-cache-dir --force-reinstall --no-warn-script-location --no-deps --quiet dapi\n", - "%pip install --user --quiet setuptools\n", - "%pip install --user --no-warn-script-location --quiet dapi" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "64e3f3a3", - "metadata": {}, "outputs": [], - "source": [ - "import sys\n", - "import os\n", - "\n", - "print(\n", - " \"Old NumPy 1.24 installed!\"\n", - " if sys.version_info < (3, 10)\n", - " and os.system(\"pip install --user --force-reinstall numpy~=1.24.0 --quiet\") == 0\n", - " else \"Skipped (Python 3.10+)\"\n", - " if sys.version_info >= (3, 10)\n", - " else \"Install failed!\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "b9fe1a7a", - "metadata": {}, - "source": [ - "### ⚠️ Please restart the kernel by choosing Kernel >> Restart kernel ⚠️" - ] + "source": "%pip install dapi --quiet" }, { "cell_type": "code", diff --git a/examples/mpm/mpm.ipynb b/examples/mpm/mpm.ipynb index 29ab102..1d87046 100644 --- 
a/examples/mpm/mpm.ipynb +++ b/examples/mpm/mpm.ipynb @@ -22,80 +22,7 @@ "id": "dabd7715", "metadata": {}, "outputs": [], - "source": [ - "%pip install --user --no-cache-dir --force-reinstall --no-warn-script-location --no-deps --quiet dapi\n", - "%pip install --user --quiet setuptools\n", - "%pip install --user --no-warn-script-location --quiet dapi" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c318e131", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Old NumPy 1.24 installed!\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.1.1\u001b[0m\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n" - ] - } - ], - "source": [ - "import sys\n", - "import os\n", - "\n", - "print(\n", - " \"Old NumPy 1.24 installed!\"\n", - " if sys.version_info < (3, 10)\n", - " and os.system(\"pip install --user --force-reinstall numpy~=1.24.0 --quiet\") == 0\n", - " else \"Skipped (Python 3.10+)\"\n", - " if sys.version_info >= (3, 10)\n", - " else \"Install failed!\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "1d36c1fb", - "metadata": {}, - "source": [ - "### ⚠️ Please restart the kernel by choosing Kernel >> Restart kernel ⚠️" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "8186b2c9", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "NumPy version: 2.0.2\n", - "DAPI version: unknown\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "\n", - "print(\"NumPy version:\", np.__version__)\n", - "import dapi\n", - "\n", - "print(\"DAPI version:\", dapi.__version__)" - ] + "source": 
"%pip install dapi --quiet" }, { "cell_type": "code", diff --git a/examples/openfoam/openfoam-minimal.ipynb b/examples/openfoam/openfoam-minimal.ipynb index aeaf74d..0abc25c 100644 --- a/examples/openfoam/openfoam-minimal.ipynb +++ b/examples/openfoam/openfoam-minimal.ipynb @@ -19,47 +19,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "%pip install --user --no-cache-dir --force-reinstall --no-warn-script-location --no-deps --quiet dapi\n", - "%pip install --user --quiet setuptools\n", - "%pip install --user --no-warn-script-location --quiet dapi" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found existing installation: dapi 0.3.0\n", - "Uninstalling dapi-0.3.0:\n", - " Successfully uninstalled dapi-0.3.0\n" - ] - } - ], - "source": [ - "import sys\n", - "import os\n", - "\n", - "print(\n", - " \"Old NumPy 1.24 installed!\"\n", - " if sys.version_info < (3, 10)\n", - " and os.system(\"pip install --user --force-reinstall numpy~=1.24.0 --quiet\") == 0\n", - " else \"Skipped (Python 3.10+)\"\n", - " if sys.version_info >= (3, 10)\n", - " else \"Install failed!\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### ⚠️ Please restart the kernel by choosing Kernel >> Restart kernel ⚠️" - ] + "source": "%pip install dapi --quiet" }, { "cell_type": "code", diff --git a/examples/openfoam/openfoam.ipynb b/examples/openfoam/openfoam.ipynb index c56ebcd..2598a6e 100644 --- a/examples/openfoam/openfoam.ipynb +++ b/examples/openfoam/openfoam.ipynb @@ -20,37 +20,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "%pip install --user --no-cache-dir --force-reinstall --no-warn-script-location --no-deps --quiet dapi\n", - "%pip install --user --quiet setuptools\n", - "%pip install --user --no-warn-script-location --quiet dapi" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "import os\n", - "\n", - "print(\n", - " \"Old NumPy 1.24 installed!\"\n", - " if sys.version_info < (3, 10)\n", - " and os.system(\"pip install --user --force-reinstall numpy~=1.24.0 --quiet\") == 0\n", - " else \"Skipped (Python 3.10+)\"\n", - " if sys.version_info >= (3, 10)\n", - " else \"Install failed!\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### ⚠️ Please restart the kernel by choosing Kernel >> Restart kernel ⚠️" - ] + "source": "%pip install dapi --quiet" }, { "cell_type": "markdown", diff --git a/examples/opensees/OpenSeesMP-dapi.ipynb b/examples/opensees/OpenSeesMP-dapi.ipynb index 98e2085..f195ef1 100644 --- a/examples/opensees/OpenSeesMP-dapi.ipynb +++ b/examples/opensees/OpenSeesMP-dapi.ipynb @@ -40,50 +40,7 @@ "id": "60255c5a", "metadata": {}, "outputs": [], - "source": [ - "%pip install --user --no-cache-dir --force-reinstall --no-warn-script-location --no-deps --quiet dapi\n", - "%pip install --user --quiet setuptools\n", - "%pip install --user --no-warn-script-location --quiet dapi" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cell-4", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found existing installation: dapi 0.3.0\n", - "Uninstalling dapi-0.3.0:\n", - " Successfully uninstalled dapi-0.3.0\n", - "Requirement already satisfied: numpy in /opt/conda/lib/python3.12/site-packages (2.2.6)\n" - ] - } - ], - "source": [ - "import sys\n", - "import os\n", - "\n", - "print(\n", - " \"Old NumPy 1.24 installed!\"\n", - " if sys.version_info < (3, 10)\n", - " and os.system(\"pip install --user --force-reinstall numpy~=1.24.0 --quiet\") == 0\n", - " else \"Skipped (Python 3.10+)\"\n", - " if sys.version_info >= (3, 10)\n", - " else \"Install failed!\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "a2680e64", - "metadata": {}, - 
"source": [ - "### ⚠️ Please restart the kernel by choosing Kernel >> Restart kernel ⚠️" - ] + "source": "%pip install dapi --quiet" }, { "cell_type": "code", diff --git a/examples/pylauncher/pylauncher_opensees.ipynb b/examples/pylauncher/pylauncher_opensees.ipynb new file mode 100644 index 0000000..6ad1710 --- /dev/null +++ b/examples/pylauncher/pylauncher_opensees.ipynb @@ -0,0 +1,273 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# OpenSees Cantilever Pushover — PyLauncher Sweep\n", + "\n", + "This notebook runs a parameter sweep over a 2D cantilever pushover analysis using\n", + "PyLauncher and dapi. We sweep over `NodalMass` and `LCol` (column length) to study\n", + "how these structural parameters affect the pushover response.\n", + "\n", + "The cantilever model:\n", + "```\n", + " ^Y\n", + " |\n", + " 2 __\n", + " | |\n", + " | |\n", + " | |\n", + " (1) LCol\n", + " | |\n", + " | |\n", + " | |\n", + " =1= ---- -------->X\n", + "```\n", + "\n", + "- Node 1: fixed base\n", + "- Node 2: free top with `NodalMass`\n", + "- Elastic beam-column element\n", + "- Gravity load (2000 kip downward) followed by lateral pushover (displacement-controlled)\n", + "\n", + "For a simpler introduction to PyLauncher sweeps, see [pylauncher_sweep.ipynb](pylauncher_sweep.ipynb)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install dapi --quiet" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from pathlib import Path\n", + "from dapi import DSClient\n", + "\n", + "ds = DSClient()\n", + "\n", + "MYDATA = Path(os.environ.get(\"JUPYTER_SERVER_ROOT\", os.path.expanduser(\"~\"))) / \"MyData\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Write the analysis script\n", + "\n", + "An OpenSeesPy cantilever pushover script based on the\n", + "[OpenSees Examples Manual](https://opensees.berkeley.edu/wiki/index.php/Examples_Manual).\n", + "It accepts `--NodalMass`, `--LCol`, and `--outDir` as command-line arguments\n", + "so PyLauncher can run each parameter combination independently." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "input_dir = MYDATA / \"opensees_sweep\"\n", + "input_dir.mkdir(parents=True, exist_ok=True)\n", + "\n", + "cantilever_script = \"\"\"\\\n", + "# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n", + "# Based on the OpenSees Examples Manual\n", + "# Units: kip, inch, second\n", + "#\n", + "# Command-line arguments (set by PyLauncher per task):\n", + "# --NodalMass mass at free node\n", + "# --LCol column length\n", + "# --outDir output directory for this run\n", + "\n", + "import argparse\n", + "import os\n", + "\n", + "if os.path.exists(\"opensees.so\"):\n", + " import opensees as ops\n", + "else:\n", + " import openseespy.opensees as ops\n", + "\n", + "parser = argparse.ArgumentParser()\n", + "parser.add_argument(\"--NodalMass\", type=float, required=True)\n", + "parser.add_argument(\"--LCol\", type=float, required=True)\n", + "parser.add_argument(\"--outDir\", type=str, required=True)\n", + "args = parser.parse_args()\n", + "\n", + "NodalMass = args.NodalMass\n", + 
"LCol = args.LCol\n", + "outDir = args.outDir\n", + "\n", + "os.makedirs(outDir, exist_ok=True)\n", + "print(f\"Running: NodalMass={NodalMass}, LCol={LCol}, outDir={outDir}\")\n", + "\n", + "ops.wipe()\n", + "ops.model(\"basic\", \"-ndm\", 2, \"-ndf\", 3)\n", + "\n", + "# Geometry\n", + "ops.node(1, 0, 0)\n", + "ops.node(2, 0, LCol)\n", + "ops.fix(1, 1, 1, 1)\n", + "ops.mass(2, NodalMass, 0.0, 0.0)\n", + "\n", + "# Element\n", + "ops.geomTransf(\"Linear\", 1)\n", + "ops.element(\"elasticBeamColumn\", 1, 1, 2, 3600000000, 4227, 1080000, 1)\n", + "\n", + "# Recorders\n", + "ops.recorder(\"Node\", \"-file\", f\"{outDir}/DFree.out\", \"-time\", \"-node\", 2, \"-dof\", 1, 2, 3, \"disp\")\n", + "ops.recorder(\"Node\", \"-file\", f\"{outDir}/RBase.out\", \"-time\", \"-node\", 1, \"-dof\", 1, 2, 3, \"reaction\")\n", + "ops.recorder(\"Element\", \"-file\", f\"{outDir}/FCol.out\", \"-time\", \"-ele\", 1, \"globalForce\")\n", + "\n", + "# Gravity analysis\n", + "ops.timeSeries(\"Linear\", 1)\n", + "ops.pattern(\"Plain\", 1, 1)\n", + "ops.load(2, 0.0, -2000.0, 0.0)\n", + "ops.wipeAnalysis()\n", + "ops.constraints(\"Plain\")\n", + "ops.numberer(\"Plain\")\n", + "ops.system(\"BandGeneral\")\n", + "ops.test(\"NormDispIncr\", 1.0e-8, 6)\n", + "ops.algorithm(\"Newton\")\n", + "ops.integrator(\"LoadControl\", 0.1)\n", + "ops.analysis(\"Static\")\n", + "ops.analyze(10)\n", + "ops.loadConst(\"-time\", 0.0)\n", + "\n", + "# Pushover analysis\n", + "ops.timeSeries(\"Linear\", 2)\n", + "ops.pattern(\"Plain\", 2, 2)\n", + "ops.load(2, 2000.0, 0.0, 0.0)\n", + "ops.integrator(\"DisplacementControl\", 2, 1, 0.1)\n", + "ops.analyze(1000)\n", + "\n", + "print(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n", + "\"\"\"\n", + "\n", + "(input_dir / \"cantilever.py\").write_text(cantilever_script)\n", + "print(f\"Wrote {input_dir}/cantilever.py\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Define the sweep\n", + "\n", + "5 nodal masses x 3 column lengths = 15 
independent analyses." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sweep = {\n", + " \"NODAL_MASS\": [4.19, 4.39, 4.59, 4.79, 4.99],\n", + " \"LCOL\": [100, 200, 300],\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Preview" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = ds.jobs.parametric_sweep.generate(\n", + " \"python3 cantilever.py --NodalMass NODAL_MASS --LCol LCOL --outDir out_NODAL_MASS_LCOL\",\n", + " sweep,\n", + " preview=True,\n", + ")\n", + "print(f\"Total runs: {len(df)}\")\n", + "df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Generate sweep files" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "commands = ds.jobs.parametric_sweep.generate(\n", + " \"python3 cantilever.py --NodalMass NODAL_MASS --LCol LCOL --outDir out_NODAL_MASS_LCOL\",\n", + " sweep,\n", + " str(input_dir),\n", + ")\n", + "\n", + "print(f\"Generated {len(commands)} task commands\\n\")\n", + "print(\"=== runsList.txt ===\")\n", + "print((input_dir / \"runsList.txt\").read_text())\n", + "\n", + "print(\"=== call_pylauncher.py ===\")\n", + "print((input_dir / \"call_pylauncher.py\").read_text())\n", + "\n", + "print(\"=== Files in input directory ===\")\n", + "for fn in sorted(os.listdir(input_dir)):\n", + " print(f\" {fn}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Submit\n", + "\n", + "Replace `your_allocation` with your TACC allocation and uncomment to run." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# job = ds.jobs.parametric_sweep.submit(\n", + "# \"/MyData/opensees_sweep/\",\n", + "# app_id=\"designsafe-agnostic-app\",\n", + "# allocation=\"your_allocation\",\n", + "# node_count=1,\n", + "# cores_per_node=48,\n", + "# max_minutes=30,\n", + "# )\n", + "# job.monitor()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/pylauncher/pylauncher_sweep.ipynb b/examples/pylauncher/pylauncher_sweep.ipynb new file mode 100644 index 0000000..378bea0 --- /dev/null +++ b/examples/pylauncher/pylauncher_sweep.ipynb @@ -0,0 +1,206 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# PyLauncher Parameter Sweeps with dapi\n", + "\n", + "This notebook demonstrates how to use dapi's parameter sweep utilities to generate\n", + "PyLauncher task lists and submit sweep jobs on DesignSafe.\n", + "\n", + "**PyLauncher** runs many independent serial tasks within a single SLURM allocation —\n", + "ideal for parameter studies, Monte Carlo simulations, and batch processing.\n", + "\n", + "For an OpenSees-specific example, see [pylauncher_opensees.ipynb](pylauncher_opensees.ipynb)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install dapi --quiet" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from pathlib import Path\n", + "from dapi import DSClient\n", + "\n", + "ds = DSClient()\n", + "\n", + "# On DesignSafe JupyterHub, ~/MyData is /home/jupyter/MyData.\n", + "# Locally, we use a local directory but the Tapis path stays /MyData/...\n", + "MYDATA = Path(os.environ.get(\"JUPYTER_SERVER_ROOT\", os.path.expanduser(\"~\"))) / \"MyData\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Write the script\n", + "\n", + "A simple script that takes `--alpha`, `--beta`, and `--output` via command line,\n", + "computes `result = alpha * beta`, and writes a JSON output file." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "input_dir = MYDATA / \"pylauncher_demo\"\n", + "input_dir.mkdir(parents=True, exist_ok=True)\n", + "\n", + "simulate_script = '''\\\n", + "\"\"\"simulate.py — minimal demo script for PyLauncher parameter sweeps.\n", + "\n", + "Accepts --alpha, --beta, and --output via command line.\n", + "Computes result = alpha * beta and writes it to the output directory.\n", + "\"\"\"\n", + "import argparse\n", + "import os\n", + "import json\n", + "\n", + "parser = argparse.ArgumentParser()\n", + "parser.add_argument(\"--alpha\", type=float, required=True)\n", + "parser.add_argument(\"--beta\", type=float, required=True)\n", + "parser.add_argument(\"--output\", type=str, required=True)\n", + "args = parser.parse_args()\n", + "\n", + "os.makedirs(args.output, exist_ok=True)\n", + "\n", + "result = args.alpha * args.beta\n", + "summary = {\"alpha\": args.alpha, \"beta\": args.beta, \"result\": result}\n", + "\n", + "outfile = os.path.join(args.output, \"result.json\")\n", + "with open(outfile, 
\"w\") as f:\n", + " json.dump(summary, f, indent=2)\n", + "\n", + "print(f\"alpha={args.alpha}, beta={args.beta} -> result={result:.4f} written to {outfile}\")\n", + "'''\n", + "\n", + "(input_dir / \"simulate.py\").write_text(simulate_script)\n", + "print(f\"Wrote {input_dir}/simulate.py\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Define the sweep" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sweep = {\n", + " \"ALPHA\": [0.3, 0.5, 3.7],\n", + " \"BETA\": [1.1, 2.0, 3.0],\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Preview (dry run)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "ds.jobs.parametric_sweep.generate(\n", + " \"python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA\",\n", + " sweep,\n", + " preview=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Generate sweep files" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "commands = ds.jobs.parametric_sweep.generate(\n", + " \"python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA\",\n", + " sweep,\n", + " str(input_dir),\n", + " debug=\"host+job\",\n", + ")\n", + "\n", + "print(f\"Generated {len(commands)} task commands\\n\")\n", + "print(\"=== runsList.txt ===\")\n", + "print((input_dir / \"runsList.txt\").read_text())\n", + "\n", + "print(\"=== call_pylauncher.py ===\")\n", + "print((input_dir / \"call_pylauncher.py\").read_text())\n", + "\n", + "print(\"=== Files in input directory ===\")\n", + "for fn in sorted(os.listdir(input_dir)):\n", + " print(f\" {fn}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Submit\n", + "\n", + "Replace `your_allocation` with your TACC allocation and uncomment to run." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# job = ds.jobs.parametric_sweep.submit(\n", + "# \"/MyData/pylauncher_demo/\",\n", + "# app_id=\"designsafe-agnostic-app\",\n", + "# allocation=\"your_allocation\",\n", + "# node_count=1,\n", + "# cores_per_node=48,\n", + "# max_minutes=30,\n", + "# )\n", + "# job.monitor()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/pylauncher_sweep.ipynb b/examples/pylauncher_sweep.ipynb deleted file mode 100644 index 7a8156e..0000000 --- a/examples/pylauncher_sweep.ipynb +++ /dev/null @@ -1,390 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": "# PyLauncher Parameter Sweeps with dapi\n\nThis notebook demonstrates how to use dapi's parameter sweep utilities to generate\nPyLauncher task lists and submit sweep jobs on DesignSafe.\n\n**PyLauncher** runs many independent serial tasks within a single SLURM allocation —\nideal for parameter studies, Monte Carlo simulations, and batch processing.\n\n**What this notebook covers:**\n\n1. **Generic demo** — a minimal `simulate.py` with `--alpha`/`--beta` parameters\n2. 
**OpenSees demo** — cantilever pushover with `--NodalMass`/`--LCol` sweep" - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%pip install --user dapi --quiet" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "from pathlib import Path\n", - "from dapi import DSClient\n", - "\n", - "ds = DSClient()\n", - "\n", - "# On DesignSafe JupyterHub, ~/MyData is /home/jupyter/MyData.\n", - "# Locally, we use a local directory but the Tapis path stays /MyData/...\n", - "MYDATA = Path(os.environ.get(\"JUPYTER_SERVER_ROOT\", os.path.expanduser(\"~\"))) / \"MyData\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "\n", - "## Part 1: Generic Demo\n", - "\n", - "A simple example sweeping over two parameters (`--alpha`, `--beta`). The script\n", - "computes `result = alpha * beta`, writes it to a JSON output file, and prints a summary.\n", - "\n", - "### Write the script" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "input_dir_generic = MYDATA / \"pylauncher_demo\"\n", - "input_dir_generic.mkdir(parents=True, exist_ok=True)\n", - "\n", - "simulate_script = '''\\\n", - "\"\"\"simulate.py — minimal demo script for PyLauncher parameter sweeps.\n", - "\n", - "Accepts --alpha, --beta, and --output via command line.\n", - "Computes result = alpha * beta and writes it to the output directory.\n", - "\"\"\"\n", - "import argparse\n", - "import os\n", - "import json\n", - "\n", - "parser = argparse.ArgumentParser()\n", - "parser.add_argument(\"--alpha\", type=float, required=True)\n", - "parser.add_argument(\"--beta\", type=float, required=True)\n", - "parser.add_argument(\"--output\", type=str, required=True)\n", - "args = parser.parse_args()\n", - "\n", - "os.makedirs(args.output, exist_ok=True)\n", - "\n", - "result = args.alpha * args.beta\n", 
- "summary = {\"alpha\": args.alpha, \"beta\": args.beta, \"result\": result}\n", - "\n", - "outfile = os.path.join(args.output, \"result.json\")\n", - "with open(outfile, \"w\") as f:\n", - " json.dump(summary, f, indent=2)\n", - "\n", - "print(f\"alpha={args.alpha}, beta={args.beta} -> result={result:.4f} written to {outfile}\")\n", - "'''\n", - "\n", - "(input_dir_generic / \"simulate.py\").write_text(simulate_script)\n", - "print(f\"Wrote {input_dir_generic}/simulate.py\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Define the sweep" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sweep = {\n", - " \"ALPHA\": [0.3, 0.5, 3.7],\n", - " \"BETA\": [1.1, 2.0, 3.0],\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Preview (dry run)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ds.jobs.parametric_sweep.generate(\n", - " \"python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA\",\n", - " sweep,\n", - " preview=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Generate sweep files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "commands = ds.jobs.parametric_sweep.generate(\n", - " \"python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA\",\n", - " sweep,\n", - " str(input_dir_generic),\n", - " debug=\"host+job\",\n", - ")\n", - "\n", - "print(f\"Generated {len(commands)} task commands\\n\")\n", - "print(\"=== runsList.txt ===\")\n", - "print((input_dir_generic / \"runsList.txt\").read_text())\n", - "\n", - "print(\"=== call_pylauncher.py ===\")\n", - "print((input_dir_generic / \"call_pylauncher.py\").read_text())\n", - "\n", - "print(\"=== Files in input directory ===\")\n", - "for fn in 
sorted(os.listdir(input_dir_generic)):\n", - " print(f\" {fn}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Submit\n", - "\n", - "Replace `your_allocation` with your TACC allocation and uncomment to run." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# job = ds.jobs.parametric_sweep.submit(\n", - "# \"/MyData/pylauncher_demo/\",\n", - "# app_id=\"designsafe-agnostic-app\",\n", - "# allocation=\"your_allocation\",\n", - "# node_count=1,\n", - "# cores_per_node=48,\n", - "# max_minutes=30,\n", - "# )\n", - "# job.monitor()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": "---\n\n## Part 2: OpenSees Cantilever Pushover Sweep\n\nA real-world example using a 2D cantilever pushover analysis.\nWe sweep over `NodalMass` and `LCol` (column length) to study how these structural\nparameters affect the pushover response.\n\nThe cantilever model:\n```\n ^Y\n |\n 2 __\n | |\n | |\n | |\n (1) LCol\n | |\n | |\n | |\n =1= ---- -------->X\n```\n\n- Node 1: fixed base\n- Node 2: free top with `NodalMass`\n- Elastic beam-column element\n- Gravity load (2000 kip downward) followed by lateral pushover (displacement-controlled)\n\n### Write the analysis script\n\nAn OpenSeesPy cantilever pushover script based on the\n[OpenSees Examples Manual](https://opensees.berkeley.edu/wiki/index.php/Examples_Manual).\nIt accepts `--NodalMass`, `--LCol`, and `--outDir` as command-line arguments\nso PyLauncher can run each parameter combination independently." 
- }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "input_dir_opensees = MYDATA / \"opensees_sweep\"\n", - "input_dir_opensees.mkdir(parents=True, exist_ok=True)\n", - "\n", - "cantilever_script = \"\"\"\\\n", - "# Ex1a.Canti2D.Push — OpenSeesPy cantilever pushover\n", - "# Based on the OpenSees Examples Manual\n", - "# Units: kip, inch, second\n", - "#\n", - "# Command-line arguments (set by PyLauncher per task):\n", - "# --NodalMass mass at free node\n", - "# --LCol column length\n", - "# --outDir output directory for this run\n", - "\n", - "import argparse\n", - "import os\n", - "\n", - "if os.path.exists(\"opensees.so\"):\n", - " import opensees as ops\n", - "else:\n", - " import openseespy.opensees as ops\n", - "\n", - "parser = argparse.ArgumentParser()\n", - "parser.add_argument(\"--NodalMass\", type=float, required=True)\n", - "parser.add_argument(\"--LCol\", type=float, required=True)\n", - "parser.add_argument(\"--outDir\", type=str, required=True)\n", - "args = parser.parse_args()\n", - "\n", - "NodalMass = args.NodalMass\n", - "LCol = args.LCol\n", - "outDir = args.outDir\n", - "\n", - "os.makedirs(outDir, exist_ok=True)\n", - "print(f\"Running: NodalMass={NodalMass}, LCol={LCol}, outDir={outDir}\")\n", - "\n", - "ops.wipe()\n", - "ops.model(\"basic\", \"-ndm\", 2, \"-ndf\", 3)\n", - "\n", - "# Geometry\n", - "ops.node(1, 0, 0)\n", - "ops.node(2, 0, LCol)\n", - "ops.fix(1, 1, 1, 1)\n", - "ops.mass(2, NodalMass, 0.0, 0.0)\n", - "\n", - "# Element\n", - "ops.geomTransf(\"Linear\", 1)\n", - "ops.element(\"elasticBeamColumn\", 1, 1, 2, 3600000000, 4227, 1080000, 1)\n", - "\n", - "# Recorders\n", - "ops.recorder(\"Node\", \"-file\", f\"{outDir}/DFree.out\", \"-time\", \"-node\", 2, \"-dof\", 1, 2, 3, \"disp\")\n", - "ops.recorder(\"Node\", \"-file\", f\"{outDir}/RBase.out\", \"-time\", \"-node\", 1, \"-dof\", 1, 2, 3, \"reaction\")\n", - "ops.recorder(\"Element\", \"-file\", f\"{outDir}/FCol.out\", 
\"-time\", \"-ele\", 1, \"globalForce\")\n", - "\n", - "# Gravity analysis\n", - "ops.timeSeries(\"Linear\", 1)\n", - "ops.pattern(\"Plain\", 1, 1)\n", - "ops.load(2, 0.0, -2000.0, 0.0)\n", - "ops.wipeAnalysis()\n", - "ops.constraints(\"Plain\")\n", - "ops.numberer(\"Plain\")\n", - "ops.system(\"BandGeneral\")\n", - "ops.test(\"NormDispIncr\", 1.0e-8, 6)\n", - "ops.algorithm(\"Newton\")\n", - "ops.integrator(\"LoadControl\", 0.1)\n", - "ops.analysis(\"Static\")\n", - "ops.analyze(10)\n", - "ops.loadConst(\"-time\", 0.0)\n", - "\n", - "# Pushover analysis\n", - "ops.timeSeries(\"Linear\", 2)\n", - "ops.pattern(\"Plain\", 2, 2)\n", - "ops.load(2, 2000.0, 0.0, 0.0)\n", - "ops.integrator(\"DisplacementControl\", 2, 1, 0.1)\n", - "ops.analyze(1000)\n", - "\n", - "print(f\"Done: NodalMass={NodalMass}, LCol={LCol}\")\n", - "\"\"\"\n", - "\n", - "(input_dir_opensees / \"cantilever.py\").write_text(cantilever_script)\n", - "print(f\"Wrote {input_dir_opensees}/cantilever.py\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Define the sweep" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sweep_opensees = {\n", - " \"NODAL_MASS\": [4.19, 4.39, 4.59, 4.79, 4.99],\n", - " \"LCOL\": [100, 200, 300],\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Preview" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "df = ds.jobs.parametric_sweep.generate(\n", - " \"python3 cantilever.py --NodalMass NODAL_MASS --LCol LCOL --outDir out_NODAL_MASS_LCOL\",\n", - " sweep_opensees,\n", - " preview=True,\n", - ")\n", - "print(f\"Total runs: {len(df)}\")\n", - "df" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Generate sweep files" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "commands = 
ds.jobs.parametric_sweep.generate(\n", - " \"python3 cantilever.py --NodalMass NODAL_MASS --LCol LCOL --outDir out_NODAL_MASS_LCOL\",\n", - " sweep_opensees,\n", - " str(input_dir_opensees),\n", - ")\n", - "\n", - "print(f\"Generated {len(commands)} task commands\\n\")\n", - "print(\"=== runsList.txt ===\")\n", - "print((input_dir_opensees / \"runsList.txt\").read_text())\n", - "\n", - "print(\"=== call_pylauncher.py ===\")\n", - "print((input_dir_opensees / \"call_pylauncher.py\").read_text())\n", - "\n", - "print(\"=== Files in input directory ===\")\n", - "for fn in sorted(os.listdir(input_dir_opensees)):\n", - " print(f\" {fn}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Submit\n", - "\n", - "Replace `your_allocation` with your TACC allocation and uncomment to run." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# job = ds.jobs.parametric_sweep.submit(\n", - "# \"/MyData/opensees_sweep/\",\n", - "# app_id=\"designsafe-agnostic-app\",\n", - "# allocation=\"your_allocation\",\n", - "# node_count=1,\n", - "# cores_per_node=48,\n", - "# max_minutes=30,\n", - "# )\n", - "# job.monitor()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "name": "python", - "version": "3.11.0" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} \ No newline at end of file From ebeb562c4db703200145f7396c06fcd877cafaa9 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Sun, 22 Mar 2026 11:38:57 -0500 Subject: [PATCH 20/21] Dapi examples 0.5.0 --- README.md | 25 +- _toc.yml | 37 -- dapi/__init__.py | 64 +-- docs/api/apps.md | 92 ---- docs/api/auth.md | 62 --- docs/api/client.md | 603 ----------------------- docs/api/database.md | 190 ------- docs/api/exceptions.md | 176 ------- docs/api/files.md | 199 -------- docs/api/index.md | 84 ---- docs/api/jobs.md | 445 
----------------- docs/api/launcher.md | 137 ----- docs/api/systems.md | 190 ------- docs/authentication.md | 290 ++--------- docs/database.md | 483 +++--------------- docs/examples.md | 87 +--- docs/examples/pylauncher.md | 20 +- docs/index.md | 70 +-- docs/installation.md | 152 +----- docs/jobs.md | 342 +++---------- docs/quickstart.md | 358 ++------------ examples/apps.ipynb | 6 +- examples/db.ipynb | 5 +- examples/mpm/mpm.ipynb | 6 +- examples/openfoam/openfoam-minimal.ipynb | 7 +- myst.yml | 21 +- 26 files changed, 358 insertions(+), 3793 deletions(-) delete mode 100644 _toc.yml delete mode 100644 docs/api/apps.md delete mode 100644 docs/api/auth.md delete mode 100644 docs/api/client.md delete mode 100644 docs/api/database.md delete mode 100644 docs/api/exceptions.md delete mode 100644 docs/api/files.md delete mode 100644 docs/api/index.md delete mode 100644 docs/api/jobs.md delete mode 100644 docs/api/launcher.md delete mode 100644 docs/api/systems.md diff --git a/README.md b/README.md index 7d3cd76..d67d6e4 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![PyPI version](https://badge.fury.io/py/dapi.svg)](https://badge.fury.io/py/dapi) [![Docs](https://img.shields.io/badge/view-docs-8A2BE2?color=8A2BE2)](https://designsafe-ci.github.io/dapi/) -`dapi` is a Python library that simplifies the process of submitting, running, and monitoring [TAPIS v3](https://tapis.readthedocs.io/en/latest/) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org) or from the command line. +`dapi` is a Python library for submitting, running, and monitoring [TAPIS v3](https://tapis.readthedocs.io/en/latest/) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org) or the command line. 
dapi @@ -50,10 +50,29 @@ For e.g., to add the environment variable `NGL_DB_USER` edit `~/.bashrc`, `~/.zs pip install dapi ``` -To install the current development version: +To install the development version: ```shell -pip install git+https://github.com/DesignSafe-CI/dapi.git --quiet +pip install git+https://github.com/DesignSafe-CI/dapi.git@dev --quiet +``` + +In Jupyter notebooks: + +```python +%pip install dapi --quiet +``` + +To test the latest dev branch in a notebook: + +```python +%pip uninstall dapi --yes +%pip install git+https://github.com/DesignSafe-CI/dapi.git@dev --quiet +``` + +For local development (editable install — changes to source are reflected immediately): + +```shell +pip install -e . ``` ## Quick Start diff --git a/_toc.yml b/_toc.yml deleted file mode 100644 index 9f8732b..0000000 --- a/_toc.yml +++ /dev/null @@ -1,37 +0,0 @@ -# Table of contents -# Learn more at https://jupyterbook.org/customize/toc.html - -format: jb-book -root: docs/index -parts: - - caption: Getting Started - chapters: - - file: docs/installation - - file: docs/authentication - - file: docs/quickstart - - caption: User Guide - chapters: - - file: docs/jobs - - file: docs/database - - caption: Examples - chapters: - - file: docs/examples - - file: docs/examples/apps - - file: docs/examples/mpm - - file: docs/examples/opensees - - file: docs/examples/openfoam - - file: docs/examples/pylauncher - - file: docs/examples/tms_credentials - - file: docs/examples/database - - caption: API Reference - chapters: - - file: docs/api/index - - file: docs/api/client - - file: docs/api/jobs - - file: docs/api/launcher - - file: docs/api/files - - file: docs/api/apps - - file: docs/api/systems - - file: docs/api/database - - file: docs/api/auth - - file: docs/api/exceptions diff --git a/dapi/__init__.py b/dapi/__init__.py index 0f03deb..f32767c 100644 --- a/dapi/__init__.py +++ b/dapi/__init__.py @@ -1,56 +1,26 @@ -"""Dapi - A Python wrapper for interacting with DesignSafe resources via 
the Tapis API. +"""Python client for submitting, monitoring, and managing TAPIS v3 jobs on DesignSafe. -This package provides a high-level, user-friendly interface for working with DesignSafe -resources through the Tapis V3 API. It simplifies complex operations and provides -organized access to different service areas including authentication, file operations, -job submission and monitoring, application discovery, system information, and database access. +Also provides access to DesignSafe research databases (NGL, Earthquake Recovery, VP) +and file operations (path translation, upload, download). -Key Features: - - Simplified authentication with credential resolution hierarchy - - DesignSafe path translation (MyData, projects, etc.) to Tapis URIs - - High-level job submission with automatic app parameter mapping - - Job monitoring with progress bars and status interpretation - - File upload/download with automatic directory creation - - Application discovery and detailed retrieval - - System queue information and resource limits - - Database access for DesignSafe research databases - - Comprehensive error handling with descriptive exceptions +Classes: + DSClient: Entry point. Provides access to jobs, files, apps, systems, and databases. + SubmittedJob: Returned by ``DSClient.jobs.submit()``. Used to monitor and inspect a job. -Main Components: - DSClient: Main client class providing organized access to all services - SubmittedJob: Class for managing and monitoring submitted Tapis jobs - Exception classes: Specific exceptions for different error types +Example:: -Example: - Basic usage with automatic authentication: + from dapi import DSClient - >>> from dapi import DSClient - >>> client = DSClient() - Enter DesignSafe Username: myuser - Enter DesignSafe Password: [hidden] - Authentication successful. 
+ client = DSClient() + job_request = client.jobs.generate( + app_id="matlab-r2023a", + input_dir_uri="/MyData/analysis/input/", + script_filename="run_analysis.m", + ) + job = client.jobs.submit(job_request) + final_status = job.monitor() - >>> # File operations - >>> client.files.upload("/local/file.txt", "/MyData/uploads/file.txt") - >>> files = client.files.list("/MyData/uploads/") - - >>> # Job submission and monitoring - >>> job_request = client.jobs.generate( - ... app_id="matlab-r2023a", - ... input_dir_uri="/MyData/analysis/input/", - ... script_filename="run_analysis.m", - ... ) - >>> job = client.jobs.submit(job_request) - >>> final_status = job.monitor() - - >>> # Database access - >>> df = client.db.ngl.read_sql("SELECT * FROM earthquake_data LIMIT 10") - -Attributes: - __version__ (str): The version number of the dapi package. - DSClient: Main client class for DesignSafe API interactions. - SubmittedJob: Class for managing submitted Tapis jobs. - Exception classes: Custom exceptions for specific error conditions. + df = client.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10") """ from .client import DSClient diff --git a/docs/api/apps.md b/docs/api/apps.md deleted file mode 100644 index d5538d0..0000000 --- a/docs/api/apps.md +++ /dev/null @@ -1,92 +0,0 @@ -# Apps - -Application discovery and management for DesignSafe computational applications. - -All functions below accept an authenticated Tapis client as the first argument. -When using the `DSClient`, the Tapis client is supplied automatically and the -methods are available under `ds.apps`. - -| Module function | Client shorthand | -|---|---| -| `find_apps(t, ...)` | `ds.apps.find(...)` | -| `get_app_details(t, ...)` | `ds.apps.get_details(...)` | - ---- - -## Application Discovery - -### `find_apps(t, search_term, list_type="ALL", verbose=True)` - -Search for Tapis apps matching a search term. - -Searches through available Tapis applications using partial name matching. 
-This function helps discover applications available for job submission. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `search_term` (`str`): Name or partial name to search for. Use an empty string for all apps. Supports partial matching with wildcards. -- `list_type` (`str`, optional): Type of apps to list. Must be one of: `"OWNED"`, `"SHARED_PUBLIC"`, `"SHARED_DIRECT"`, `"READ_PERM"`, `"MINE"`, `"ALL"`. Defaults to `"ALL"`. -- `verbose` (`bool`, optional): If `True`, prints a summary of found apps including ID, version, and owner information. Defaults to `True`. - -**Returns:** `List[Tapis]` -- List of matching Tapis app objects with selected fields (`id`, `version`, `owner`). - -**Raises:** - -- `AppDiscoveryError`: If the Tapis API search fails or an unexpected error occurs during the search operation. - -**Example:** - -```python -from dapi.apps import find_apps - -apps = find_apps(client, "matlab", verbose=True) -# Found 3 matching apps: -# - matlab-r2023a (Version: 1.0, Owner: designsafe) -# - matlab-parallel (Version: 2.1, Owner: tacc) -# - matlab-desktop (Version: 1.5, Owner: designsafe) - -# Using DSClient: -apps = ds.apps.find("matlab") -``` - ---- - -## Application Details - -### `get_app_details(t, app_id, app_version=None, verbose=True)` - -Get detailed information for a specific app ID and version. - -Retrieves comprehensive details about a specific Tapis application, -including job attributes, execution system, and parameter definitions. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `app_id` (`str`): Exact app ID to look up. Must match exactly. -- `app_version` (`Optional[str]`, optional): Specific app version to retrieve. If `None`, fetches the latest available version. Defaults to `None`. -- `verbose` (`bool`, optional): If `True`, prints basic app information including ID, version, owner, execution system, and description. Defaults to `True`. 
- -**Returns:** `Optional[Tapis]` -- Tapis app object with full details including `jobAttributes`, `parameterSet`, and other configuration. Returns `None` if the app is not found. - -**Raises:** - -- `AppDiscoveryError`: If the Tapis API call fails (except for 404 not found) or an unexpected error occurs during retrieval. - -**Example:** - -```python -from dapi.apps import get_app_details - -app = get_app_details(client, "matlab-r2023a", "1.0") -# App Details: -# ID: matlab-r2023a -# Version: 1.0 -# Owner: designsafe -# Execution System: frontera -# Description: MATLAB R2023a runtime environment - -# Using DSClient: -app = ds.apps.get_details("matlab-r2023a", "1.0") -``` diff --git a/docs/api/auth.md b/docs/api/auth.md deleted file mode 100644 index e77b831..0000000 --- a/docs/api/auth.md +++ /dev/null @@ -1,62 +0,0 @@ -# Auth - -Authentication and credential management for DesignSafe access. - -## Authentication - -### `init` - -```python -dapi.auth.init( - base_url: str = "https://designsafe.tapis.io", - username: str = None, - password: str = None, - env_file: str = None, -) -> Tapis -``` - -Initialize and authenticate a Tapis client for DesignSafe. - -Creates and authenticates a Tapis client instance for interacting with DesignSafe resources. The function follows a credential resolution hierarchy and handles secure password input when needed. - -**Credential Resolution Order:** - -1. Explicitly passed `username`/`password` arguments -2. Environment variables (`DESIGNSAFE_USERNAME`, `DESIGNSAFE_PASSWORD`) -- loads from `env_file` if specified, otherwise checks system environment -3. Interactive prompts for missing credentials - -**Parameters:** - -| Name | Type | Default | Description | -|------|------|---------|-------------| -| `base_url` | `str` | `"https://designsafe.tapis.io"` | The Tapis base URL for DesignSafe API endpoints. | -| `username` | `str` | `None` | Explicit DesignSafe username. If `None`, attempts to load from environment or prompts the user. 
| -| `password` | `str` | `None` | Explicit DesignSafe password. If `None`, attempts to load from environment or prompts the user securely. | -| `env_file` | `str` | `None` | Path to a `.env` file containing credentials. If `None`, attempts to load from a default `.env` file if it exists. | - -**Returns:** - -`Tapis` -- An authenticated `tapipy.Tapis` client object ready for API calls. - -**Raises:** - -- {py:class}`~dapi.exceptions.AuthenticationError` -- If authentication fails due to invalid credentials, network issues, or if required credentials cannot be obtained. - -**Examples:** - -```python -# Using explicit credentials -client = init(username="myuser", password="mypass") - -# Using environment variables or .env file -client = init(env_file=".env") - -# Interactive authentication -client = init() -# Enter DesignSafe Username: myuser -# Enter DesignSafe Password: [hidden] -``` - -:::{note} -The function disables automatic spec downloads for faster initialization. Password input uses `getpass` for secure entry in terminal environments. -::: diff --git a/docs/api/client.md b/docs/api/client.md deleted file mode 100644 index 245ffcf..0000000 --- a/docs/api/client.md +++ /dev/null @@ -1,603 +0,0 @@ -# DSClient - -The main client interface for all DAPI functionality. DSClient provides organized access to DesignSafe resources through the Tapis V3 API. - -## DSClient - -### `DSClient(tapis_client=None, **auth_kwargs)` - -Main client for interacting with DesignSafe resources via Tapis V3 using dapi. - -The DSClient provides a high-level interface for working with DesignSafe resources -through the Tapis V3 API. It handles authentication and provides organized access -to different service areas including applications, files, jobs, systems, and databases. - -**Args:** -- `tapis_client` (Tapis, optional): Pre-authenticated Tapis client instance. If provided, it will be used instead of creating a new one. 
-- `**auth_kwargs`: Additional authentication arguments passed to `auth.init()` when `tapis_client` is not provided. Common arguments include: - - `username` (str): DesignSafe username - - `password` (str): DesignSafe password - - `base_url` (str): Tapis base URL - - `env_file` (str): Path to `.env` file with credentials - -**Attributes:** -- `tapis` (Tapis): The underlying authenticated Tapis client instance. -- `apps` (AppMethods): Interface for application discovery and details. -- `files` (FileMethods): Interface for file operations (upload, download, list). -- `jobs` (JobMethods): Interface for job submission and monitoring. -- `systems` (SystemMethods): Interface for system information and queues. -- `db` (DatabaseAccessor): Interface for database connections and queries. - -**Raises:** -- `TypeError`: If `tapis_client` is provided but is not a Tapis instance. -- `AuthenticationError`: If authentication fails when creating a new Tapis client. - -**Example:** - -```python -# Basic usage with automatic authentication -ds = DSClient() - -# Using explicit credentials -ds = DSClient(username="myuser", password="mypass") - -# Using a pre-authenticated Tapis client -from tapipy.tapis import Tapis -tapis = Tapis(base_url="https://designsafe.tapis.io", ...) 
-tapis.get_tokens() -ds = DSClient(tapis_client=tapis) -``` - -## Accessing the Raw Tapis Client - -For advanced use cases or accessing Tapis APIs not wrapped by dapi, you can get the underlying Tapis client: - -```python -from dapi import DSClient - -# Initialize DSClient -ds = DSClient() - -# Access the raw Tapis client -tapis_client = ds.tapis - -# Use raw Tapis APIs directly -raw_apps = tapis_client.apps.getApps(search="*opensees*") -systems = tapis_client.systems.getSystems() -jobs = tapis_client.jobs.getJobList() -``` - -### When to Use the Raw Tapis Client - -- Access Tapis APIs not yet wrapped by dapi -- Use advanced search parameters not exposed by dapi -- Implement custom functionality -- Debug or troubleshoot API calls -- Access experimental or new Tapis features - -:::{warning} -When using the raw Tapis client, you'll need to handle errors and data formatting yourself. The dapi wrapper provides error handling and user-friendly formatting. -::: - -## Service Interfaces - -The DSClient provides access to different DesignSafe services through specialized interface classes: - -### AppMethods - -Interface for Tapis application discovery and details retrieval. - -#### `find(search_term, list_type="ALL", verbose=True)` - -Search for Tapis apps matching a search term. - -**Args:** -- `search_term` (str): Name or partial name to search for. Use empty string for all apps. Supports partial matching with wildcards. -- `list_type` (str, optional): Type of apps to list. Must be one of: `"OWNED"`, `"SHARED_PUBLIC"`, `"SHARED_DIRECT"`, `"READ_PERM"`, `"MINE"`, `"ALL"`. Defaults to `"ALL"`. -- `verbose` (bool, optional): If `True`, prints summary of found apps including ID, version, and owner information. Defaults to `True`. - -**Returns:** `List[Tapis]` -- List of matching Tapis app objects with selected fields (id, version, owner). - -**Raises:** -- `AppDiscoveryError`: If the Tapis API search fails. 
- -**Example:** - -```python -apps = ds.apps.find("matlab") -# Found 3 matching apps: -# - matlab-r2023a (Version: 1.0, Owner: designsafe) -# - matlab-parallel (Version: 2.1, Owner: tacc) -``` - ---- - -#### `get_details(app_id, app_version=None, verbose=True)` - -Get detailed information for a specific app ID and version. - -**Args:** -- `app_id` (str): Exact app ID to look up. Must match exactly. -- `app_version` (str, optional): Specific app version to retrieve. If `None`, fetches the latest available version. Defaults to `None`. -- `verbose` (bool, optional): If `True`, prints basic app information including ID, version, owner, execution system, and description. Defaults to `True`. - -**Returns:** `Optional[Tapis]` -- Tapis app object with full details including jobAttributes, parameterSet, and other configuration. Returns `None` if the app is not found. - -**Raises:** -- `AppDiscoveryError`: If the Tapis API call fails (except for 404 not found). - -**Example:** - -```python -app = ds.apps.get_details("matlab-r2023a", "1.0") -# App Details: -# ID: matlab-r2023a -# Version: 1.0 -# Execution System: frontera -``` - ---- - -### FileMethods - -Interface for file operations on Tapis storage systems. - -#### `to_uri(path, verify_exists=False)` - -Translate DesignSafe-style paths to Tapis URIs. - -**Args:** -- `path` (str): The DesignSafe-style path string to translate. Supported formats: - - MyData paths: `"/MyData/folder"`, `"jupyter/MyData/folder"` - - Community paths: `"/CommunityData/folder"` - - Project paths: `"/projects/PRJ-XXXX/folder"` - - Direct Tapis URIs: `"tapis://system-id/path"` -- `verify_exists` (bool, optional): If `True`, verifies the translated path exists on the target Tapis system. Defaults to `False`. - -**Returns:** `str` -- The corresponding Tapis URI (e.g., `"tapis://system-id/path"`). - -**Raises:** -- `FileOperationError`: If path translation fails or verification fails. 
-- `AuthenticationError`: If username is required for MyData paths but not available. -- `ValueError`: If the input path format is unrecognized. - -**Example:** - -```python -uri = ds.files.to_uri("/MyData/analysis/results") -# "tapis://designsafe.storage.default/username/analysis/results" - -uri = ds.files.to_uri("/projects/PRJ-1234/data", verify_exists=True) -``` - ---- - -#### `to_path(tapis_uri)` - -Translate Tapis URIs to DesignSafe local paths. - -**Args:** -- `tapis_uri` (str): The Tapis URI to convert. Supported formats: - - `"tapis://designsafe.storage.default/username/path"` -> `"/home/jupyter/MyData/path"` - - `"tapis://designsafe.storage.community/path"` -> `"/home/jupyter/CommunityData/path"` - - `"tapis://project-*/path"` -> `"/home/jupyter/MyProjects/path"` - -**Returns:** `str` -- The corresponding DesignSafe local path, or the original URI if it is not a recognized format. - -**Example:** - -```python -local_path = ds.files.to_path("tapis://designsafe.storage.default/user/data/file.txt") -# "/home/jupyter/MyData/data/file.txt" -``` - ---- - -#### `upload(local_path, remote_uri)` - -Upload a local file to a Tapis storage system. - -**Args:** -- `local_path` (str): Path to the local file to upload. -- `remote_uri` (str): Tapis URI destination (e.g., `"tapis://system/path/file.txt"`). - -**Raises:** -- `FileNotFoundError`: If the local file does not exist. -- `ValueError`: If `local_path` is not a file or `remote_uri` is invalid. -- `FileOperationError`: If the Tapis upload operation fails. - -**Example:** - -```python -ds.files.upload("/local/data.txt", "tapis://mysystem/uploads/data.txt") -``` - ---- - -#### `download(remote_uri, local_path)` - -Download a file from a Tapis storage system to the local filesystem. - -**Args:** -- `remote_uri` (str): Tapis URI of the file to download (e.g., `"tapis://system/path/file.txt"`). -- `local_path` (str): Local filesystem path where the file should be saved. 
- -**Raises:** -- `ValueError`: If `local_path` is a directory or `remote_uri` is invalid. -- `FileOperationError`: If the download operation fails. - -**Example:** - -```python -ds.files.download("tapis://mysystem/data/results.txt", "/local/results.txt") -``` - ---- - -#### `list(remote_uri, limit=100, offset=0)` - -List files and directories in a Tapis storage system path. - -**Args:** -- `remote_uri` (str): Tapis URI of the directory to list (e.g., `"tapis://system/path/"`). -- `limit` (int, optional): Maximum number of items to return. Defaults to `100`. -- `offset` (int, optional): Number of items to skip (for pagination). Defaults to `0`. - -**Returns:** `List[Tapis]` -- List of file and directory objects from the specified path. Each object contains metadata like name, size, type, and permissions. - -**Raises:** -- `ValueError`: If `remote_uri` is invalid. -- `FileOperationError`: If the listing operation fails or path not found. - -**Example:** - -```python -files = ds.files.list("tapis://mysystem/data/") -for f in files: - print(f"{f.name} ({f.type})") -``` - ---- - -### JobMethods - -Interface for Tapis job submission, monitoring, and management. - -**Attributes:** -- `parametric_sweep` (ParametricSweepMethods): Interface for PyLauncher parameter sweep generation and submission. - -#### `generate(app_id, input_dir_uri, *, script_filename=None, app_version=None, job_name=None, description=None, tags=None, max_minutes=None, node_count=None, cores_per_node=None, memory_mb=None, queue=None, allocation=None, archive_system=None, archive_path=None, extra_file_inputs=None, extra_app_args=None, extra_env_vars=None, extra_scheduler_options=None, script_param_names=["Input Script", "Main Script", "tclScript"], input_dir_param_name="Input Directory", allocation_param_name="TACC Allocation")` - -Generate a Tapis job request dictionary based on app definition and inputs. Automatically retrieves app details and applies user-specified overrides and extra parameters. 
- -**Args:** -- `app_id` (str): The ID of the Tapis application to use for the job. -- `input_dir_uri` (str): Tapis URI to the input directory containing job files. -- `script_filename` (str, optional): Name of the main script file to execute. If `None`, no script parameter is added (suitable for apps like OpenFOAM). -- `app_version` (str, optional): Specific app version. If `None`, uses latest. -- `job_name` (str, optional): Custom job name. If `None`, auto-generates one. -- `description` (str, optional): Job description. If `None`, uses app description. -- `tags` (List[str], optional): List of tags to associate with the job. -- `max_minutes` (int, optional): Maximum runtime in minutes. Overrides app default. -- `node_count` (int, optional): Number of compute nodes. Overrides app default. -- `cores_per_node` (int, optional): Cores per node. Overrides app default. -- `memory_mb` (int, optional): Memory in MB. Overrides app default. -- `queue` (str, optional): Execution queue name. Overrides app default. -- `allocation` (str, optional): TACC allocation to charge for compute time. -- `archive_system` (str, optional): Archive system for job outputs. Use `"designsafe"` for `designsafe.storage.default`. If `None`, uses app default. -- `archive_path` (str, optional): Archive directory path. Can be a full path or just a directory name in MyData. If `None` and `archive_system` is `"designsafe"`, defaults to `"tapis-jobs-archive/${JobCreateDate}/${JobUUID}"`. -- `extra_file_inputs` (List[Dict], optional): Additional file inputs beyond the main input directory. -- `extra_app_args` (List[Dict], optional): Additional application arguments. -- `extra_env_vars` (List[Dict], optional): Additional environment variables. Each item should be `{"key": "VAR_NAME", "value": "var_value"}`. -- `extra_scheduler_options` (List[Dict], optional): Additional scheduler options. -- `script_param_names` (List[str], optional): Parameter names to check for script placement. 
Defaults to `["Input Script", "Main Script", "tclScript"]`. -- `input_dir_param_name` (str, optional): Parameter name for input directory. Defaults to `"Input Directory"`. -- `allocation_param_name` (str, optional): Parameter name for allocation. Defaults to `"TACC Allocation"`. - -**Returns:** `Dict[str, Any]` -- Complete job request dictionary ready for submission. - -**Raises:** -- `AppDiscoveryError`: If the specified app cannot be found. -- `ValueError`: If required parameters are missing or invalid. -- `JobSubmissionError`: If job request generation fails. - -**Example:** - -```python -job_request = ds.jobs.generate( - app_id="matlab-r2023a", - input_dir_uri="tapis://designsafe.storage.default/username/input/", - script_filename="run_analysis.m", - max_minutes=120, - allocation="MyProject-123", -) -``` - ---- - -#### `submit(job_request)` - -Submit a job request dictionary to Tapis. - -**Args:** -- `job_request` (Dict[str, Any]): Complete job request dictionary (typically from `generate()`). - -**Returns:** `SubmittedJob` -- A SubmittedJob object for monitoring and managing the job. - -**Raises:** -- `ValueError`: If `job_request` is not a dictionary. -- `JobSubmissionError`: If the Tapis submission fails. - -**Example:** - -```python -job_request = ds.jobs.generate(...) -job = ds.jobs.submit(job_request) -print(f"Job submitted with UUID: {job.uuid}") -``` - ---- - -#### `job(job_uuid)` - -Get a SubmittedJob object for an existing job by UUID. - -**Args:** -- `job_uuid` (str): The UUID of an existing Tapis job. - -**Returns:** `SubmittedJob` -- A job object for monitoring via `.monitor()`. - -**Example:** - -```python -job = ds.jobs.job("12345678-1234-1234-1234-123456789abc") -job.monitor() -``` - ---- - -#### `status(job_uuid)` - -Get the current status of a job by UUID. - -**Args:** -- `job_uuid` (str): The UUID of the job to check. - -**Returns:** `str` -- The current job status (e.g., `"QUEUED"`, `"RUNNING"`, `"FINISHED"`). 
- -**Raises:** -- `JobMonitorError`: If status retrieval fails. - -**Example:** - -```python -ds.jobs.status("12345678-1234-1234-1234-123456789abc") -# 'FINISHED' -``` - ---- - -#### `runtime_summary(job_uuid, verbose=False)` - -Print the runtime summary for a job by UUID. - -**Args:** -- `job_uuid` (str): The UUID of the job to analyze. -- `verbose` (bool, optional): If `True`, prints detailed job history events. Defaults to `False`. - -**Example:** - -```python -ds.jobs.runtime_summary("12345678-1234-1234-1234-123456789abc") -# Runtime Summary -# --------------- -# QUEUED time: 00:05:30 -# RUNNING time: 01:23:45 -# TOTAL time: 01:29:15 -``` - ---- - -#### `interpret_status(final_status, job_uuid=None)` - -Print a user-friendly interpretation of a job status. - -**Args:** -- `final_status` (str): The job status to interpret. -- `job_uuid` (str, optional): The job UUID for context in the message. - -**Example:** - -```python -ds.jobs.interpret_status("FINISHED", "12345678-1234-1234-1234-123456789abc") -# Job 12345678-1234-1234-1234-123456789abc completed successfully. -``` - ---- - -#### `list(app_id=None, status=None, limit=100, output="df", verbose=False)` - -List jobs with optional filtering. Fetches jobs from Tapis ordered by creation date (newest first). Filters are applied client-side. - -**Args:** -- `app_id` (str, optional): Filter by application ID. -- `status` (str, optional): Filter by job status (e.g., `"FINISHED"`). Case-insensitive. -- `limit` (int, optional): Maximum jobs to fetch. Defaults to `100`. -- `output` (str, optional): Output format. `"df"` for pandas DataFrame (default), `"list"` for list of dicts, `"raw"` for TapisResult objects. -- `verbose` (bool, optional): Print job count. Defaults to `False`. - -**Returns:** Depends on `output`: DataFrame, list of dicts, or list of TapisResult objects. - -**Raises:** -- `JobMonitorError`: If the Tapis API call fails. -- `ValueError`: If output format is not recognized. 
- -**Example:** - -```python -df = ds.jobs.list(app_id="matlab-r2023a", status="FINISHED") -jobs = ds.jobs.list(output="list") -raw = ds.jobs.list(limit=10, output="raw") -``` - ---- - -### SystemMethods - -Interface for Tapis system information and queue management. - -#### `queues(system_id, verbose=True)` - -List logical queues available on a Tapis execution system. - -**Args:** -- `system_id` (str): The ID of the execution system (e.g., `"frontera"`). -- `verbose` (bool, optional): If `True`, prints detailed queue information. Defaults to `True`. - -**Returns:** `List[Any]` -- List of queue objects with queue configuration details. - -**Raises:** -- `SystemInfoError`: If the system is not found or queue retrieval fails. -- `ValueError`: If `system_id` is empty. - -**Example:** - -```python -queues = ds.systems.queues("frontera") -``` - ---- - -#### `check_credentials(system_id, username=None)` - -Check whether TMS credentials exist for a user on a system. - -**Args:** -- `system_id` (str): The ID of the Tapis system (e.g., `"frontera"`). -- `username` (str, optional): Username to check. Defaults to the authenticated user. - -**Returns:** `bool` -- `True` if credentials exist, `False` otherwise. - -**Raises:** -- `CredentialError`: If the credential check fails unexpectedly. -- `ValueError`: If `system_id` is empty. - -**Example:** - -```python -has_creds = ds.systems.check_credentials("frontera") -``` - ---- - -#### `establish_credentials(system_id, username=None, force=False, verbose=True)` - -Establish TMS credentials for a user on a Tapis system. Idempotent: skips creation if credentials already exist (unless `force=True`). Only supported for systems using TMS_KEYS authentication. - -**Args:** -- `system_id` (str): The ID of the Tapis system (e.g., `"frontera"`). -- `username` (str, optional): Username. Defaults to the authenticated user. -- `force` (bool, optional): Re-create even if credentials exist. Defaults to `False`. 
-- `verbose` (bool, optional): Print status messages. Defaults to `True`. - -**Raises:** -- `CredentialError`: If the system is not TMS_KEYS or creation fails. -- `ValueError`: If `system_id` is empty. - -**Example:** - -```python -ds.systems.establish_credentials("frontera") -``` - ---- - -#### `revoke_credentials(system_id, username=None, verbose=True)` - -Remove TMS credentials for a user on a Tapis system. Idempotent: succeeds silently if credentials do not exist. - -**Args:** -- `system_id` (str): The ID of the Tapis system (e.g., `"frontera"`). -- `username` (str, optional): Username. Defaults to the authenticated user. -- `verbose` (bool, optional): Print status messages. Defaults to `True`. - -**Raises:** -- `CredentialError`: If credential removal fails unexpectedly. -- `ValueError`: If `system_id` is empty. - -**Example:** - -```python -ds.systems.revoke_credentials("frontera") -``` - ---- - -### ParametricSweepMethods - -Interface for PyLauncher parameter sweeps. Accessible via `ds.jobs.parametric_sweep`. - -#### `generate(command, sweep, directory=None, *, placeholder_style="token", debug=None, preview=False)` - -Generate PyLauncher sweep files or preview the parameter grid. - -With `preview=True`, returns a DataFrame of all parameter combinations -- no files are written. Otherwise, expands `command` into one command per combination and writes `runsList.txt` and `call_pylauncher.py` into `directory`. - -**Args:** -- `command` (str): Command template with placeholders matching sweep keys. -- `sweep` (Dict[str, Any]): Mapping of placeholder name to sequence of values. -- `directory` (str, optional): Directory to write files into (created if needed). Required when `preview` is `False`. -- `placeholder_style` (str, optional): `"token"` (default) for bare `ALPHA`, or `"braces"` for `{ALPHA}`. -- `debug` (str, optional): Optional debug string (e.g., `"host+job"`). -- `preview` (bool, optional): If `True`, return a DataFrame (dry run). 
- -**Returns:** `List[str]` of commands, or `pandas.DataFrame` when `preview` is `True`. - -**Example:** - -```python -# Preview the parameter grid -df = ds.jobs.parametric_sweep.generate( - command="python run.py --alpha ALPHA --beta BETA", - sweep={"ALPHA": [0.1, 0.5], "BETA": [1, 2]}, - preview=True, -) - -# Generate sweep files -commands = ds.jobs.parametric_sweep.generate( - command="python run.py --alpha ALPHA --beta BETA", - sweep={"ALPHA": [0.1, 0.5], "BETA": [1, 2]}, - directory="/MyData/sweep/", -) -``` - ---- - -#### `submit(directory, app_id, allocation, *, node_count=None, cores_per_node=None, max_minutes=None, queue=None, **kwargs)` - -Submit a PyLauncher sweep job. Translates `directory` to a Tapis URI, builds a job request with `call_pylauncher.py` as the script, and submits it. - -**Args:** -- `directory` (str): Path to the input directory containing `runsList.txt` and `call_pylauncher.py` (e.g., `"/MyData/sweep/"`). -- `app_id` (str): Tapis application ID (e.g., `"openseespy-s3"`). -- `allocation` (str): TACC allocation to charge. -- `node_count` (int, optional): Number of compute nodes. -- `cores_per_node` (int, optional): Cores per node. -- `max_minutes` (int, optional): Maximum runtime in minutes. -- `queue` (str, optional): Execution queue name. -- `**kwargs`: Additional arguments passed to `ds.jobs.generate()`. - -**Returns:** `SubmittedJob` -- A job object for monitoring via `.monitor()`. - -**Example:** - -```python -job = ds.jobs.parametric_sweep.submit( - directory="/MyData/sweep/", - app_id="openseespy-s3", - allocation="MyProject-123", - node_count=2, - max_minutes=60, -) -job.monitor() -``` diff --git a/docs/api/database.md b/docs/api/database.md deleted file mode 100644 index 1a6976c..0000000 --- a/docs/api/database.md +++ /dev/null @@ -1,190 +0,0 @@ -# Database - -Database connections and query execution for DesignSafe research databases. 
- -## Database Accessor - -### `DatabaseAccessor` - -```python -class dapi.db.accessor.DatabaseAccessor -``` - -Provides lazy access to different DesignSafe database connections via properties. - -This class manages multiple database connections and provides convenient property-based access to different DesignSafe databases. Each database connection is created only when first accessed (lazy initialization) and reused for subsequent calls. - -**Constructor:** - -```python -DatabaseAccessor() -``` - -Initializes the accessor with empty connection slots. No database connections are established until a property is first accessed. - -**Properties:** - -#### `ngl` - -```python -DatabaseAccessor.ngl -> DSDatabase -``` - -Access the NGL (Natural Hazards Engineering) database connection manager. Provides access to the `sjbrande_ngl_db` database containing natural hazards engineering research data. The connection is created on first access. - -#### `vp` - -```python -DatabaseAccessor.vp -> DSDatabase -``` - -Access the VP (Vulnerability and Performance) database connection manager. Provides access to the `sjbrande_vpdb` database containing vulnerability and performance analysis data. The connection is created on first access. - -#### `eq` - -```python -DatabaseAccessor.eq -> DSDatabase -``` - -Access the EQ (Post-Earthquake Recovery) database connection manager. Provides access to the `post_earthquake_recovery` database containing post-earthquake recovery research data. The connection is created on first access. - -**Methods:** - -#### `close_all` - -```python -DatabaseAccessor.close_all() -> None -``` - -Close all active database engines and their connection pools. This should be called when the `DatabaseAccessor` is no longer needed to prevent connection leaks. - -After calling `close_all()`, accessing any database property will create new connections since the instances are reset to `None`. 
- -**Example:** - -```python -accessor = DatabaseAccessor() - -# Access NGL database (created on first access) -ngl_db = accessor.ngl - -# Query the database -results = ngl_db.read_sql("SELECT COUNT(*) as total FROM users") - -# Close all connections when done -accessor.close_all() -``` - ---- - -## Database Engine - -### `DSDatabase` - -```python -class dapi.db.db.DSDatabase(dbname: str = "ngl") -``` - -Manages connection and querying for a specific DesignSafe database. - -Provides a high-level interface for connecting to preconfigured DesignSafe databases using SQLAlchemy with connection pooling. It supports environment-based configuration and provides query results in multiple formats. - -**Constructor Parameters:** - -| Name | Type | Default | Description | -|------|------|---------|-------------| -| `dbname` | `str` | `"ngl"` | Shorthand name for the database to connect to. Must be one of `"ngl"`, `"vp"`, or `"eq"`. | - -**Raises:** - -- `ValueError` -- If `dbname` is not a valid configured database name. -- `SQLAlchemyError` -- If database engine creation or connection fails. - -**Attributes:** - -| Name | Type | Description | -|------|------|-------------| -| `user` | `str` | Database username for authentication. | -| `password` | `str` | Database password for authentication. | -| `host` | `str` | Database host address. | -| `port` | `int` | Database port number. | -| `db` | `str` | Name of the connected database. | -| `dbname_short` | `str` | Shorthand name for the database. | -| `engine` | `sqlalchemy.Engine` | SQLAlchemy engine for database connections. | -| `Session` | `sqlalchemy.orm.sessionmaker` | Session factory for database operations. | - -**Methods:** - -#### `read_sql` - -```python -DSDatabase.read_sql(sql: str, output_type: str = "DataFrame") -> pd.DataFrame | list[dict] -``` - -Execute a SQL query using a dedicated session and return the results. 
- -Obtains a session from the connection pool, executes the provided SQL query, and returns results in the specified format. The session is automatically closed after execution, returning the connection to the pool. - -| Name | Type | Default | Description | -|------|------|---------|-------------| -| `sql` | `str` | *(required)* | The SQL query string to execute. | -| `output_type` | `str` | `"DataFrame"` | Format for query results. Must be `"DataFrame"` for a `pandas.DataFrame` or `"dict"` for a list of dictionaries. | - -**Returns:** - -- `pandas.DataFrame` when `output_type="DataFrame"` -- a DataFrame with column names as headers. -- `list[dict]` when `output_type="dict"` -- a list of dictionaries where each dict represents a row. - -**Raises:** - -- `ValueError` -- If `sql` is empty/`None` or `output_type` is not `"DataFrame"` or `"dict"`. -- `SQLAlchemyError` -- If a database error occurs during query execution. - -#### `close` - -```python -DSDatabase.close() -> None -``` - -Dispose of the SQLAlchemy engine and close all database connections. - -Properly shuts down the database engine and its connection pool. Call this when the database instance is no longer needed to prevent connection leaks and free up database resources. - -After calling `close()`, this `DSDatabase` instance should not be used for further database operations as the engine will be disposed. - -**Example:** - -```python -db = DSDatabase("ngl") -df = db.read_sql("SELECT * FROM table_name LIMIT 5") - -# Get dictionary results -results = db.read_sql("SELECT COUNT(*) as total FROM users", output_type="dict") - -db.close() -``` - ---- - -## Database Configuration - -### `db_config` - -```python -dapi.db.config.db_config: dict -``` - -A dictionary mapping shorthand database names to their configuration details. 
- -| Key | Database Name | Env Prefix | Description | -|-----|---------------|------------|-------------| -| `"ngl"` | `sjbrande_ngl_db` | `NGL_` | Natural hazards engineering research database | -| `"vp"` | `sjbrande_vpdb` | `VP_` | Vulnerability and performance database | -| `"eq"` | `post_earthquake_recovery` | `EQ_` | Post-earthquake recovery database | - -For each database, the following environment variables are checked (using the env prefix): - -- `{PREFIX}DB_USER` -- Database username (default: `"dspublic"`) -- `{PREFIX}DB_PASSWORD` -- Database password (default: `"R3ad0nlY"`) -- `{PREFIX}DB_HOST` -- Database host (default: `"129.114.52.174"`) -- `{PREFIX}DB_PORT` -- Database port (default: `3306`) diff --git a/docs/api/exceptions.md b/docs/api/exceptions.md deleted file mode 100644 index f8afeae..0000000 --- a/docs/api/exceptions.md +++ /dev/null @@ -1,176 +0,0 @@ -# Exceptions - -Custom exception classes for DAPI error handling and debugging. - -## Exception Hierarchy - -All exceptions inherit from Python's built-in `Exception` via `DapiException`: - -``` -Exception - └── DapiException - ├── AuthenticationError - ├── FileOperationError - ├── AppDiscoveryError - ├── SystemInfoError - ├── CredentialError - ├── JobSubmissionError - └── JobMonitorError -``` - -You can catch `DapiException` to handle any dapi-specific error, or catch a more specific subclass for targeted error handling. - ---- - -## Base Exception - -### `DapiException` - -```python -class dapi.exceptions.DapiException(message: str) -``` - -Base exception class for all dapi-related errors. - -This is the parent class for all custom exceptions in the dapi library. It can be used to catch any dapi-specific error or as a base for creating new custom exceptions. - -**Parameters:** - -| Name | Type | Description | -|------|------|-------------| -| `message` | `str` | Human-readable description of the error. 
| - ---- - -## Authentication Exceptions - -### `AuthenticationError` - -```python -class dapi.exceptions.AuthenticationError(message: str) -``` - -*Inherits from {py:class}`~dapi.exceptions.DapiException`.* - -Raised when authentication with Tapis fails. This includes invalid credentials, network connectivity problems, or Tapis service unavailability. - -**Raised by:** `dapi.auth.init()` when credentials are invalid, missing, or the Tapis service is unreachable. - ---- - -## File Operation Exceptions - -### `FileOperationError` - -```python -class dapi.exceptions.FileOperationError(message: str) -``` - -*Inherits from {py:class}`~dapi.exceptions.DapiException`.* - -Raised when file operations fail, including uploads, downloads, directory listings, path translations, and file existence checks. - ---- - -## Application Discovery Exceptions - -### `AppDiscoveryError` - -```python -class dapi.exceptions.AppDiscoveryError(message: str) -``` - -*Inherits from {py:class}`~dapi.exceptions.DapiException`.* - -Raised when searching for Tapis applications fails, when a specific application cannot be found, or when retrieving application details encounters an error. - ---- - -## System Information Exceptions - -### `SystemInfoError` - -```python -class dapi.exceptions.SystemInfoError(message: str) -``` - -*Inherits from {py:class}`~dapi.exceptions.DapiException`.* - -Raised when operations involving Tapis execution systems fail, such as retrieving system details, listing available queues, or checking system availability. - ---- - -## Credential Management Exceptions - -### `CredentialError` - -```python -class dapi.exceptions.CredentialError(message: str) -``` - -*Inherits from {py:class}`~dapi.exceptions.DapiException`.* - -Raised when credential management operations involving Tapis Managed Secrets (TMS) fail, such as checking, establishing, or revoking user credentials on a Tapis execution system. 
- ---- - -## Job Management Exceptions - -### `JobSubmissionError` - -```python -class dapi.exceptions.JobSubmissionError(message: str, request=None, response=None) -``` - -*Inherits from {py:class}`~dapi.exceptions.DapiException`.* - -Raised when job submission or validation fails. This includes errors during job request generation, validation, or submission to Tapis. It carries additional context about the HTTP request and response when available. - -**Parameters:** - -| Name | Type | Default | Description | -|------|------|---------|-------------| -| `message` | `str` | *(required)* | Description of the job submission failure. | -| `request` | `requests.Request` | `None` | The HTTP request object that failed. | -| `response` | `requests.Response` | `None` | The HTTP response object received. | - -**Attributes:** - -| Name | Type | Description | -|------|------|-------------| -| `request` | `requests.Request` | The failed HTTP request, if available. | -| `response` | `requests.Response` | The HTTP response received, if available. | - -The string representation includes request URL, method, response status code, and response body when available. - -**Example:** - -```python -try: - job = client.jobs.submit(job_request) -except JobSubmissionError as e: - print(f"Job submission failed: {e}") - if e.response: - print(f"Status code: {e.response.status_code}") -``` - ---- - -### `JobMonitorError` - -```python -class dapi.exceptions.JobMonitorError(message: str) -``` - -*Inherits from {py:class}`~dapi.exceptions.DapiException`.* - -Raised when job monitoring or management fails, including errors during job status monitoring, job cancellation, retrieving job details, or accessing job outputs. 
- -**Example:** - -```python -try: - status = job.monitor(timeout_minutes=60) -except JobMonitorError as e: - print(f"Job monitoring failed: {e}") -``` diff --git a/docs/api/files.md b/docs/api/files.md deleted file mode 100644 index 3c28de1..0000000 --- a/docs/api/files.md +++ /dev/null @@ -1,199 +0,0 @@ -# Files - -File operations and path translation utilities for DesignSafe storage systems. - -All functions below accept an authenticated Tapis client as the first argument. -When using the `DSClient`, the Tapis client is supplied automatically and the -methods are available under `ds.files`. - -| Module function | Client shorthand | -|---|---| -| `get_ds_path_uri(t, ...)` | `ds.files.to_uri(...)` | -| `tapis_uri_to_local_path(...)` | `ds.files.to_path(...)` | -| `upload_file(t, ...)` | `ds.files.upload(...)` | -| `download_file(t, ...)` | `ds.files.download(...)` | -| `list_files(t, ...)` | `ds.files.list(...)` | - ---- - -## Path Translation - -### `get_ds_path_uri(t, path, verify_exists=False)` - -Translate DesignSafe-style paths to Tapis URIs. - -Converts commonly used DesignSafe path formats (e.g., `/MyData/folder`, -`/projects/PRJ-XXXX/folder`) to their corresponding Tapis system URIs. -Supports MyData, CommunityData, and project-specific paths with automatic -system discovery for projects. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `path` (`str`): The DesignSafe-style path string to translate. Supported formats: - - MyData paths: `"/MyData/folder"`, `"jupyter/MyData/folder"`, `"/home/jupyter/MyData/folder"` - - Community paths: `"/CommunityData/folder"` - - Project paths: `"/projects/PRJ-XXXX/folder"` - - Direct Tapis URIs: `"tapis://system-id/path"` (passed through) -- `verify_exists` (`bool`, optional): If `True`, verifies the translated path exists on the target Tapis system. Defaults to `False`. - -**Returns:** `str` -- The corresponding Tapis URI (e.g., `"tapis://system-id/path"`). 
- -**Raises:** - -- `FileOperationError`: If path translation fails, project system lookup fails, or path verification fails (when `verify_exists=True`). -- `AuthenticationError`: If username is required for MyData paths but `t.username` is not available. -- `ValueError`: If the input path format is unrecognized, empty, or incomplete. - -**Example:** - -```python -from dapi.files import get_ds_path_uri - -uri = get_ds_path_uri(client, "/MyData/analysis/results") -# Translated '/MyData/analysis/results' to -# 'tapis://designsafe.storage.default/username/analysis/results' - -uri = get_ds_path_uri(client, "/projects/PRJ-1234/data", verify_exists=True) - -# Using DSClient: -uri = ds.files.to_uri("/MyData/analysis/results") -``` - ---- - -### `tapis_uri_to_local_path(tapis_uri)` - -Convert a Tapis URI to the corresponding DesignSafe local path. - -This is the reverse operation of `get_ds_path_uri()`. Converts Tapis system -URIs back to their equivalent DesignSafe local paths accessible in a Jupyter -environment. - -**Args:** - -- `tapis_uri` (`str`): The Tapis URI to convert. Supported formats: - - `"tapis://designsafe.storage.default/username/path"` -> `"/home/jupyter/MyData/path"` - - `"tapis://designsafe.storage.community/path"` -> `"/home/jupyter/CommunityData/path"` - - `"tapis://project-*/path"` -> `"/home/jupyter/MyProjects/path"` - -**Returns:** `str` -- The corresponding DesignSafe local path, or the original URI if it is not a recognized Tapis URI format. - -**Raises:** - -- `ValueError`: If the Tapis URI format is invalid. 
- -**Example:** - -```python -from dapi.files import tapis_uri_to_local_path - -local_path = tapis_uri_to_local_path( - "tapis://designsafe.storage.default/user/data/file.txt" -) -# "/home/jupyter/MyData/data/file.txt" - -local_path = tapis_uri_to_local_path( - "tapis://designsafe.storage.community/datasets/earthquake.csv" -) -# "/home/jupyter/CommunityData/datasets/earthquake.csv" - -# Using DSClient: -local_path = ds.files.to_path("tapis://designsafe.storage.default/user/data/file.txt") -``` - ---- - -## File Operations - -### `upload_file(t, local_path, remote_uri)` - -Upload a local file to a Tapis storage system. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `local_path` (`str`): Path to the local file to upload. -- `remote_uri` (`str`): Tapis URI destination (e.g., `"tapis://system/path/file.txt"`). - -**Raises:** - -- `FileNotFoundError`: If the local file does not exist. -- `ValueError`: If `local_path` is not a file or `remote_uri` is invalid. -- `FileOperationError`: If the Tapis upload operation fails. - -**Example:** - -```python -from dapi.files import upload_file - -upload_file(client, "/local/data.txt", "tapis://mysystem/uploads/data.txt") -# Uploading '/local/data.txt' to system 'mysystem' at path 'uploads/data.txt'... -# Upload complete. - -# Using DSClient: -ds.files.upload("/local/data.txt", "tapis://mysystem/uploads/data.txt") -``` - ---- - -### `download_file(t, remote_uri, local_path)` - -Download a file from a Tapis storage system to the local filesystem. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `remote_uri` (`str`): Tapis URI of the file to download (e.g., `"tapis://system/path/file.txt"`). -- `local_path` (`str`): Local filesystem path where the file should be saved. - -**Raises:** - -- `ValueError`: If `local_path` is a directory or `remote_uri` is invalid. -- `FileOperationError`: If the download operation fails or the remote file is not found. 
- -**Example:** - -```python -from dapi.files import download_file - -download_file(client, "tapis://mysystem/data/results.txt", "/local/results.txt") -# Downloading from system 'mysystem' path 'data/results.txt' to '/local/results.txt'... -# Download complete. - -# Using DSClient: -ds.files.download("tapis://mysystem/data/results.txt", "/local/results.txt") -``` - ---- - -### `list_files(t, remote_uri, limit=100, offset=0)` - -List files and directories in a Tapis storage system path. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `remote_uri` (`str`): Tapis URI of the directory to list (e.g., `"tapis://system/path/"`). -- `limit` (`int`, optional): Maximum number of items to return. Defaults to `100`. -- `offset` (`int`, optional): Number of items to skip (for pagination). Defaults to `0`. - -**Returns:** `List[Tapis]` -- List of file and directory objects from the specified path. Each object contains metadata like name, size, type, and permissions. - -**Raises:** - -- `ValueError`: If `remote_uri` is invalid. -- `FileOperationError`: If the listing operation fails or the path is not found. - -**Example:** - -```python -from dapi.files import list_files - -files = list_files(client, "tapis://mysystem/data/") -for f in files: - print(f"{f.name} ({f.type})") - -# Using DSClient: -files = ds.files.list("tapis://mysystem/data/") -``` diff --git a/docs/api/index.md b/docs/api/index.md deleted file mode 100644 index 2c8acbf..0000000 --- a/docs/api/index.md +++ /dev/null @@ -1,84 +0,0 @@ -# API Reference - -This section provides API documentation for all DAPI modules and classes. For the full auto-generated reference with signatures and type annotations, see the [Sphinx API docs](/api/). 
- -## Overview - -The DAPI package is organized into several core modules: - -### **Core Client** -- **[DSClient](client.md)** - Main client interface for all DAPI functionality - -### **Service Modules** -- **[Jobs](jobs.md)** - Job submission, monitoring, and management -- **[Launcher](launcher.md)** - PyLauncher parameter sweep utilities -- **[Files](files.md)** - File operations and path translation -- **[Apps](apps.md)** - Application discovery and details -- **[Systems](systems.md)** - System information and queue management -- **[Auth](auth.md)** - Authentication and credential management - -### **Database Access** -- **[Database](database.md)** - Database connections and query execution - -### **Utilities** -- **[Exceptions](exceptions.md)** - Custom exception classes - -## Quick Navigation - -### **Getting Started** -```python -from dapi import DSClient - -# Initialize client -ds = DSClient() - -# Access different services -ds.jobs.generate(...) -ds.files.upload(...) -ds.db.ngl.read_sql(...) 
-``` - -### **Common Operations** -- **Submit Jobs**: `ds.jobs.submit(job_dict)` -- **Monitor Jobs**: `submitted_job.monitor()` -- **File Upload**: `ds.files.upload(local_path, remote_uri)` -- **File Download**: `ds.files.download(remote_uri, local_path)` -- **Database Query**: `ds.db.ngl.read_sql("SELECT * FROM table")` - -### **Advanced Features** -- **Archive Management**: Custom job result organization -- **Path Translation**: Seamless local/cloud path conversion -- **Parametric Studies**: Batch job submission and monitoring -- **Error Handling**: Comprehensive exception hierarchy - -## Documentation Conventions - -### **Parameter Types** -- `Optional[Type]` - Parameter can be `None` -- `Union[Type1, Type2]` - Parameter accepts multiple types -- `List[Type]` - List containing elements of specified type -- `Dict[str, Any]` - Dictionary with string keys and any values - -### **Return Types** -- Methods clearly document return types and formats -- Async methods return appropriate async types -- Error conditions are documented in `Raises` sections - -### **Examples** -Each method includes practical usage examples showing: -- Basic usage patterns -- Parameter combinations -- Error handling -- Integration with other DAPI components - -## Cross-References - -The API documentation includes extensive cross-references: -- **Method signatures** link to parameter and return types -- **Related methods** are referenced in descriptions -- **Example workflows** demonstrate method integration -- **Error handling** shows exception hierarchies - ---- - -**Browse the API documentation using the navigation menu to explore specific modules and their functionality.** \ No newline at end of file diff --git a/docs/api/jobs.md b/docs/api/jobs.md deleted file mode 100644 index 86ff8cf..0000000 --- a/docs/api/jobs.md +++ /dev/null @@ -1,445 +0,0 @@ -# Jobs - -Job submission, monitoring, and management functionality for DesignSafe computational workflows. 
- -## Job Request Generation - -### `generate_job_request(tapis_client, app_id, input_dir_uri, script_filename=None, app_version=None, job_name=None, description=None, tags=None, max_minutes=None, node_count=None, cores_per_node=None, memory_mb=None, queue=None, allocation=None, archive_system=None, archive_path=None, extra_file_inputs=None, extra_app_args=None, extra_env_vars=None, extra_scheduler_options=None, script_param_names=["Input Script", "Main Script", "tclScript"], input_dir_param_name="Input Directory", allocation_param_name="TACC Allocation")` - -Generate a Tapis job request dictionary based on app definition and inputs. - -Creates a properly formatted job request dictionary by retrieving the specified -application details and applying user-provided overrides and additional parameters. -The function automatically maps the script filename (if provided) and input -directory to the appropriate app parameters. It dynamically reads the app definition -to detect parameter names, determines whether to use appArgs or envVariables, and -automatically populates all required parameters with default values when available. - -**Args:** -- `tapis_client` (Tapis): Authenticated Tapis client instance. -- `app_id` (str): The ID of the Tapis application to use for the job. -- `input_dir_uri` (str): Tapis URI to the input directory containing job files. -- `script_filename` (str, optional): Name of the main script file to execute. If `None` (default), no script parameter is added. Suitable for apps like OpenFOAM that don't take a script argument. -- `app_version` (str, optional): Specific app version to use. If `None`, uses latest. -- `job_name` (str, optional): Custom job name. If `None`, auto-generates based on app ID and timestamp. -- `description` (str, optional): Job description. If `None`, uses app description. -- `tags` (List[str], optional): List of tags to associate with the job. -- `max_minutes` (int, optional): Maximum runtime in minutes. 
Overrides app default. -- `node_count` (int, optional): Number of compute nodes. Overrides app default. -- `cores_per_node` (int, optional): Cores per node. Overrides app default. -- `memory_mb` (int, optional): Memory in MB. Overrides app default. -- `queue` (str, optional): Execution queue name. Overrides app default. -- `allocation` (str, optional): TACC allocation to charge for compute time. -- `archive_system` (str, optional): Archive system for job outputs. If `"designsafe"` is specified, uses `"designsafe.storage.default"`. If `None`, uses app default. -- `archive_path` (str, optional): Archive directory path. Can be a full path or just a directory name in MyData. If `None` and `archive_system` is `"designsafe"`, defaults to `"${EffectiveUserId}/tapis-jobs-archive/${JobCreateDate}/${JobUUID}"`. -- `extra_file_inputs` (List[Dict[str, Any]], optional): Additional file inputs beyond the main input directory. -- `extra_app_args` (List[Dict[str, Any]], optional): Additional application arguments for parameters expected in `appArgs`. -- `extra_env_vars` (List[Dict[str, Any]], optional): Additional environment variables for parameters expected in `envVariables` (e.g., OpenFOAM solver, mesh). Each item should be `{"key": "VAR_NAME", "value": "var_value"}`. -- `extra_scheduler_options` (List[Dict[str, Any]], optional): Additional scheduler options. -- `script_param_names` (List[str], optional): Parameter names/keys to check for script placement. Defaults to `["Input Script", "Main Script", "tclScript"]`. -- `input_dir_param_name` (str, optional): The name of the fileInput in the app definition that corresponds to `input_dir_uri`. Defaults to `"Input Directory"`, but the actual name is auto-detected from the app definition when the app uses a different one. -- `allocation_param_name` (str, optional): Parameter name for TACC allocation. Defaults to `"TACC Allocation"`. - -**Returns:** `Dict[str, Any]` -- Complete job request dictionary ready for submission to Tapis. 
- -**Raises:** -- `AppDiscoveryError`: If the specified app cannot be found or details cannot be retrieved. -- `ValueError`: If required parameters are missing or invalid, or if `script_filename` is provided but no suitable placement can be found. -- `JobSubmissionError`: If unexpected errors occur during job request generation. - -**Example:** - -```python -from dapi.jobs import generate_job_request - -job_request = generate_job_request( - tapis_client=client, - app_id="matlab-r2023a", - input_dir_uri="tapis://designsafe.storage.default/username/input/", - script_filename="run_analysis.m", - max_minutes=120, - allocation="MyProject-123", -) -``` - -## Job Submission - -### `submit_job_request(tapis_client, job_request)` - -Submit a pre-generated job request dictionary to Tapis. - -Takes a complete job request dictionary (typically generated by `generate_job_request`) -and submits it to the Tapis jobs service for execution. Prints the job request -details before submission for debugging purposes. - -**Args:** -- `tapis_client` (Tapis): Authenticated Tapis client instance. -- `job_request` (Dict[str, Any]): Complete job request dictionary containing all necessary job parameters, file inputs, and configuration. - -**Returns:** `SubmittedJob` -- A SubmittedJob object for monitoring and managing the submitted job. - -**Raises:** -- `ValueError`: If `job_request` is not a dictionary. -- `JobSubmissionError`: If the Tapis job submission fails, with additional context from the HTTP request and response when available. - -**Example:** - -```python -from dapi.jobs import generate_job_request, submit_job_request - -job_request = generate_job_request(...) -submitted_job = submit_job_request(client, job_request) -# Job submitted successfully. UUID: 12345678-1234-1234-1234-123456789abc -``` - -## Job Monitoring - -### `get_job_status(t, job_uuid)` - -Get the current status of a job by UUID. 
- -Standalone convenience function that creates a temporary SubmittedJob instance -to retrieve the current status of an existing job. - -**Args:** -- `t` (Tapis): Authenticated Tapis client instance. -- `job_uuid` (str): The UUID of the job to check. - -**Returns:** `str` -- Current job status (e.g., `"QUEUED"`, `"RUNNING"`, `"FINISHED"`, `"FAILED"`). - -**Raises:** -- `JobMonitorError`: If status retrieval fails. -- `TypeError`: If `t` is not a Tapis instance. -- `ValueError`: If `job_uuid` is empty or invalid. - -**Example:** - -```python -from dapi.jobs import get_job_status - -status = get_job_status(client, "12345678-1234-1234-1234-123456789abc") -print(f"Job status: {status}") -``` - ---- - -### `get_runtime_summary(t, job_uuid, verbose=False)` - -Print a runtime summary for a job by UUID. - -Standalone convenience function that creates a temporary SubmittedJob instance -to analyze and print the runtime summary of an existing job. - -**Args:** -- `t` (Tapis): Authenticated Tapis client instance. -- `job_uuid` (str): The UUID of the job to analyze. -- `verbose` (bool, optional): If `True`, prints detailed job history events in addition to the runtime summary. Defaults to `False`. - -**Raises:** -- `JobMonitorError`: If job details cannot be retrieved. -- `TypeError`: If `t` is not a Tapis instance. -- `ValueError`: If `job_uuid` is empty or invalid. - -**Example:** - -```python -from dapi.jobs import get_runtime_summary - -get_runtime_summary(client, "12345678-1234-1234-1234-123456789abc") -# Runtime Summary -# --------------- -# QUEUED time: 00:05:30 -# RUNNING time: 01:23:45 -# TOTAL time: 01:29:15 -# --------------- -``` - ---- - -### `interpret_job_status(final_status, job_uuid=None)` - -Print a user-friendly interpretation of a job status. - -Provides human-readable explanations for various job status values, -including both standard Tapis states and special monitoring states. - -**Args:** -- `final_status` (str): The job status to interpret. 
Can be a standard Tapis status (`"FINISHED"`, `"FAILED"`, etc.) or a special monitoring status (`STATUS_TIMEOUT`, `STATUS_INTERRUPTED`, etc.). -- `job_uuid` (str, optional): The job UUID to include in the message for context. Defaults to `None`. - -**Example:** - -```python -from dapi.jobs import interpret_job_status - -interpret_job_status("FINISHED", "12345678-1234-1234-1234-123456789abc") -# Job 12345678-1234-1234-1234-123456789abc completed successfully. - -interpret_job_status("FAILED") -# Job failed. Check logs or job details. -``` - -## Listing Jobs - -### `list_jobs(tapis_client, app_id=None, status=None, limit=100, output="df", verbose=False)` - -Fetch Tapis jobs with optional filtering. - -Retrieves jobs from Tapis ordered by creation date (newest first) -and optionally filters by app ID and/or status. Filters are applied -client-side after fetching. - -**Args:** -- `tapis_client` (Tapis): Authenticated Tapis client instance. -- `app_id` (str, optional): Filter by application ID (e.g., `"opensees-mp-s3"`). -- `status` (str, optional): Filter by job status (e.g., `"FINISHED"`, `"FAILED"`). Case-insensitive. -- `limit` (int, optional): Maximum number of jobs to fetch from Tapis. Defaults to `100`. -- `output` (str, optional): Output format. `"df"` returns a pandas DataFrame (default), `"list"` returns a list of dicts, `"raw"` returns the raw TapisResult objects. -- `verbose` (bool, optional): If `True`, prints the number of jobs found. - -**Returns:** Depends on `output`: -- `"df"`: pandas DataFrame with formatted datetime columns. -- `"list"`: list of dicts with job metadata. -- `"raw"`: list of TapisResult objects as returned by the API. - -**Raises:** -- `JobMonitorError`: If the Tapis API call fails. -- `ValueError`: If output format is not recognized. 
- -**Example:** - -```python -from dapi.jobs import list_jobs - -df = list_jobs(client, app_id="matlab-r2023a", status="FINISHED") -jobs = list_jobs(client, output="list") -raw = list_jobs(client, limit=10, output="raw") -``` - -## SubmittedJob Class - -### `SubmittedJob(tapis_client, job_uuid)` - -Represents a submitted Tapis job with methods for monitoring and management. - -This class provides a high-level interface for interacting with Tapis jobs, -including status monitoring, output retrieval, job cancellation, and runtime -analysis. It caches job details and status to minimize API calls. - -**Args:** -- `tapis_client` (Tapis): Authenticated Tapis client instance. -- `job_uuid` (str): The UUID of an existing Tapis job. - -**Raises:** -- `TypeError`: If `tapis_client` is not a Tapis instance. -- `ValueError`: If `job_uuid` is empty or not a string. - -**Example:** - -```python -from dapi.jobs import SubmittedJob - -job = SubmittedJob(client, "12345678-1234-1234-1234-123456789abc") -status = job.status -if status in job.TERMINAL_STATES: - print("Job completed") -``` - -### Properties - -#### `uuid` - -`str` -- The unique identifier of the Tapis job. Set at initialization. - ---- - -#### `status` - -`str` -- The current job status, using cached value when appropriate. For terminal states, returns the cached status without making an API call. For non-terminal states, may fetch fresh status. Returns `STATUS_UNKNOWN` if status cannot be determined. - ---- - -#### `details` - -`Tapis` -- Complete job details object containing all job metadata, configuration, and current state information. Fetches from Tapis if not already cached. - ---- - -#### `last_message` - -`str` or `None` -- The last status message recorded for the job. Contains information about the current job state or errors. Returns `None` if not available or if retrieval fails. 
- ---- - -#### `archive_uri` - -`str` or `None` -- Tapis URI of the job's archive directory (e.g., `"tapis://designsafe.storage.default/user/tapis-jobs-archive/..."`). Returns `None` if archive information is not set. - -### Methods - -#### `get_status(force_refresh=True)` - -Get the current job status from Tapis API. - -**Args:** -- `force_refresh` (bool, optional): If `True`, always makes a fresh API call. If `False`, may return cached status. Defaults to `True`. - -**Returns:** `str` -- Current job status from Tapis API. - -**Raises:** -- `JobMonitorError`: If status cannot be retrieved from Tapis. - ---- - -#### `monitor(interval=15, timeout_minutes=None)` - -Monitor job status with progress bars until completion or timeout. - -Continuously monitors the job status using tqdm progress bars to show -progress through different job phases (waiting, running). Handles -interruptions and errors gracefully. - -**Args:** -- `interval` (int, optional): Status check interval in seconds. Defaults to `15`. -- `timeout_minutes` (int, optional): Maximum monitoring time in minutes. If `None`, uses the job's `maxMinutes` from its configuration. Use `-1` or `0` for unlimited monitoring. Defaults to `None`. - -**Returns:** `str` -- Final job status. Can be a standard Tapis status (`"FINISHED"`, `"FAILED"`, etc.) or a special monitoring status: -- `STATUS_TIMEOUT`: Monitoring timed out -- `STATUS_INTERRUPTED`: User interrupted monitoring (Ctrl+C) -- `STATUS_MONITOR_ERROR`: Error occurred during monitoring - -**Example:** - -```python -job = SubmittedJob(client, job_uuid) -final_status = job.monitor(interval=30, timeout_minutes=120) -if final_status == "FINISHED": - print("Job completed successfully!") -``` - ---- - -#### `print_runtime_summary(verbose=False)` - -Print a summary of job runtime phases and total execution time. - -Analyzes the job's execution history to show time spent in different -phases (queued, running) and calculates the total runtime. 
- -**Args:** -- `verbose` (bool, optional): If `True`, prints detailed job history events in addition to the runtime summary. Defaults to `False`. - -**Example:** - -```python -job.print_runtime_summary() -# Runtime Summary -# --------------- -# QUEUED time: 00:05:30 -# RUNNING time: 01:23:45 -# TOTAL time: 01:29:15 -# --------------- -``` - ---- - -#### `cancel()` - -Attempt to cancel the job execution. Jobs that are already in terminal states cannot be cancelled. - -**Raises:** -- `JobMonitorError`: If the cancellation request fails. - -**Example:** - -```python -job.cancel() -# Cancel request sent for job 12345678-... Status may take time to update. -``` - ---- - -#### `list_outputs(path="/", limit=100, offset=0)` - -List files and directories in the job's archive directory. - -**Args:** -- `path` (str, optional): Relative path within the job archive to list. Defaults to `"/"` (archive root). -- `limit` (int, optional): Maximum number of items to return. Defaults to `100`. -- `offset` (int, optional): Number of items to skip for pagination. Defaults to `0`. - -**Returns:** `List[Tapis]` -- List of file and directory objects in the specified path. - -**Raises:** -- `FileOperationError`: If archive information is not available or listing fails. - -**Example:** - -```python -outputs = job.list_outputs() -for item in outputs: - print(f"{item.name} ({item.type})") - -results = job.list_outputs(path="results/") -``` - ---- - -#### `get_output_content(output_filename, max_lines=None, missing_ok=True)` - -Retrieve the content of a specific output file from the job's archive. - -**Args:** -- `output_filename` (str): Name of the file in the job's archive root (e.g., `"tapisjob.out"`, `"tapisjob.err"`). -- `max_lines` (int, optional): If specified, returns only the last N lines of the file. Defaults to `None` (full file). -- `missing_ok` (bool, optional): If `True` and the file is not found, returns `None`. If `False`, raises `FileOperationError`. Defaults to `True`. 
- -**Returns:** `str` or `None` -- Content of the file as a string, or `None` if the file is not found and `missing_ok=True`. - -**Raises:** -- `FileOperationError`: If the archive is not available, the file is not found (and `missing_ok=False`), or fetching fails. - -**Example:** - -```python -# Get job output log -output = job.get_output_content("tapisjob.out") - -# Get last 50 lines of error log -errors = job.get_output_content("tapisjob.err", max_lines=50) - -# Require file to exist -results = job.get_output_content("results.txt", missing_ok=False) -``` - ---- - -#### `download_output(remote_path, local_target)` - -Download a specific file from the job's archive directory. - -**Args:** -- `remote_path` (str): Relative path to the file within the job archive. -- `local_target` (str): Local filesystem path where the file should be saved. - -**Raises:** -- `FileOperationError`: If archive information is not available or download fails. - -**Example:** - -```python -job.download_output("tapisjob.out", "/local/job_output.txt") -job.download_output("results/data.txt", "/local/results/data.txt") -``` - -## Status Constants - -Module-level constants used by the monitoring system: - -| Constant | Value | Description | -|---|---|---| -| `STATUS_TIMEOUT` | `"TIMEOUT"` | Monitoring timed out before the job reached a terminal state. | -| `STATUS_INTERRUPTED` | `"INTERRUPTED"` | User interrupted monitoring (e.g., Ctrl+C). | -| `STATUS_MONITOR_ERROR` | `"MONITOR_ERROR"` | An error occurred during the monitoring loop. | -| `STATUS_UNKNOWN` | `"UNKNOWN"` | Job status could not be determined. | -| `TAPIS_TERMINAL_STATES` | `["FINISHED", "FAILED", "CANCELLED", "STOPPED", "ARCHIVING_FAILED"]` | Standard Tapis states indicating a job has completed (successfully or not). 
| diff --git a/docs/api/launcher.md b/docs/api/launcher.md deleted file mode 100644 index a99446d..0000000 --- a/docs/api/launcher.md +++ /dev/null @@ -1,137 +0,0 @@ -# Launcher - -PyLauncher parameter sweep utilities for generating task lists and launcher scripts. - -## Generate Sweep - -### `generate_sweep(command, sweep, directory=None, *, placeholder_style="token", debug=None, preview=False)` - -Generate sweep commands and write PyLauncher input files. - -When `preview` is `True`, returns a DataFrame of all parameter combinations without writing any files -- useful for inspecting the sweep in a notebook before committing. - -When `preview` is `False` (default), expands `command` into one command per parameter combination and writes `runsList.txt` and `call_pylauncher.py` into `directory`. - -**Args:** -- `command` (str): Command template containing placeholders that match keys in `sweep`. Environment variables like `$WORK` or `$SLURM_JOB_ID` are left untouched. -- `sweep` (Mapping[str, Sequence[Any]]): Mapping of placeholder name to a sequence of values. Example: `{"ALPHA": [0.3, 0.5], "BETA": [1, 2]}`. -- `directory` (str | Path, optional): Directory to write files into. Created if it does not exist. Required when `preview` is `False`. -- `placeholder_style` (str, optional): How placeholders appear in `command`: - - `"token"` (default): bare tokens, e.g. `ALPHA` - - `"braces"`: brace-wrapped, e.g. `{ALPHA}` -- `debug` (str, optional): Optional debug string passed to `ClassicLauncher` (e.g., `"host+job"`). Ignored when `preview` is `True`. -- `preview` (bool, optional): If `True`, return a DataFrame of parameter combinations without writing files. Defaults to `False`. - -**Returns:** `List[str]` of generated commands when `preview` is `False`, or a `pandas.DataFrame` of parameter combinations when `True`. - -**Raises:** -- `TypeError`: If a sweep value is not a non-string sequence. 
-- `ValueError`: If a sweep value is empty, `placeholder_style` is invalid, or `directory` is missing when `preview` is `False`. - -**Example:** - -```python -from dapi.launcher import generate_sweep - -# Preview parameter combinations -df = generate_sweep( - command="python run.py --alpha ALPHA --beta BETA", - sweep={"ALPHA": [0.1, 0.5, 1.0], "BETA": [1, 2]}, - preview=True, -) -print(df) -# ALPHA BETA -# 0 0.1 1 -# 1 0.1 2 -# 2 0.5 1 -# 3 0.5 2 -# 4 1.0 1 -# 5 1.0 2 - -# Generate files for PyLauncher -commands = generate_sweep( - command="python run.py --alpha ALPHA --beta BETA", - sweep={"ALPHA": [0.1, 0.5, 1.0], "BETA": [1, 2]}, - directory="/home/jupyter/MyData/sweep/", -) -# Writes runsList.txt and call_pylauncher.py to the directory -``` - -## Client Interface - -The `ParametricSweepMethods` class is accessible via `ds.jobs.parametric_sweep` on a `DSClient` instance. It wraps `generate_sweep` and adds a `submit` method that handles Tapis URI translation and job submission. - -### `ParametricSweepMethods.generate(command, sweep, directory=None, *, placeholder_style="token", debug=None, preview=False)` - -Generate PyLauncher sweep files or preview the parameter grid. This is a convenience wrapper around `generate_sweep()`. - -**Args:** -- `command` (str): Command template with placeholders matching sweep keys. -- `sweep` (Dict[str, Any]): Mapping of placeholder name to sequence of values. -- `directory` (str, optional): Directory to write files into (created if needed). Required when `preview` is `False`. -- `placeholder_style` (str, optional): `"token"` (default) for bare `ALPHA`, or `"braces"` for `{ALPHA}`. -- `debug` (str, optional): Optional debug string (e.g., `"host+job"`). -- `preview` (bool, optional): If `True`, return a DataFrame (dry run). - -**Returns:** `List[str]` of commands, or `pandas.DataFrame` when `preview` is `True`. 
- -**Example:** - -```python -ds = DSClient() - -# Preview -df = ds.jobs.parametric_sweep.generate( - command="python run.py --alpha ALPHA", - sweep={"ALPHA": [0.1, 0.5, 1.0]}, - preview=True, -) - -# Write files -commands = ds.jobs.parametric_sweep.generate( - command="python run.py --alpha ALPHA", - sweep={"ALPHA": [0.1, 0.5, 1.0]}, - directory="/home/jupyter/MyData/sweep/", -) -``` - ---- - -### `ParametricSweepMethods.submit(directory, app_id, allocation, *, node_count=None, cores_per_node=None, max_minutes=None, queue=None, **kwargs)` - -Submit a PyLauncher sweep job. Translates `directory` to a Tapis URI, builds a job request with `call_pylauncher.py` as the script, and submits it. - -**Args:** -- `directory` (str): Path to the input directory containing `runsList.txt` and `call_pylauncher.py` (e.g., `"/MyData/sweep/"`). -- `app_id` (str): Tapis application ID (e.g., `"openseespy-s3"`). -- `allocation` (str): TACC allocation to charge. -- `node_count` (int, optional): Number of compute nodes. -- `cores_per_node` (int, optional): Cores per node. -- `max_minutes` (int, optional): Maximum runtime in minutes. -- `queue` (str, optional): Execution queue name. -- `**kwargs`: Additional arguments passed to `ds.jobs.generate()`. - -**Returns:** `SubmittedJob` -- A job object for monitoring via `.monitor()`. 
- -**Example:** - -```python -ds = DSClient() - -# Generate sweep files first -ds.jobs.parametric_sweep.generate( - command="python run.py --alpha ALPHA --beta BETA", - sweep={"ALPHA": [0.1, 0.5], "BETA": [1, 2]}, - directory="/home/jupyter/MyData/sweep/", -) - -# Submit the sweep job -job = ds.jobs.parametric_sweep.submit( - directory="/MyData/sweep/", - app_id="openseespy-s3", - allocation="MyProject-123", - node_count=2, - max_minutes=60, -) -job.monitor() -``` diff --git a/docs/api/systems.md b/docs/api/systems.md deleted file mode 100644 index 15733d2..0000000 --- a/docs/api/systems.md +++ /dev/null @@ -1,190 +0,0 @@ -# Systems - -System information, queue management, and TMS credential management for DesignSafe execution systems. - -All functions below accept an authenticated Tapis client as the first argument. -When using the `DSClient`, the Tapis client is supplied automatically and the -methods are available under `ds.systems`. - -| Module function | Client shorthand | -|---|---| -| `list_system_queues(t, ...)` | `ds.systems.queues(...)` | -| `check_credentials(t, ...)` | `ds.systems.check_credentials(...)` | -| `establish_credentials(t, ...)` | `ds.systems.establish_credentials(...)` | -| `revoke_credentials(t, ...)` | `ds.systems.revoke_credentials(...)` | -| `setup_tms_credentials(t, ...)` | *(called automatically during `DSClient` init)* | - ---- - -## System Queues - -### `list_system_queues(t, system_id, verbose=True)` - -Retrieve the list of batch logical queues available on a specific Tapis execution system. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `system_id` (`str`): The ID of the execution system (e.g., `"frontera"`, `"stampede3"`). -- `verbose` (`bool`, optional): If `True`, prints the found queues with details. Defaults to `True`. - -**Returns:** `List[Any]` -- A list of queue objects (typically `TapisResult` instances) defined for the system. Returns an empty list if the system exists but has no queues defined. 
- -**Raises:** - -- `SystemInfoError`: If the system is not found or an API error occurs. -- `ValueError`: If `system_id` is empty. - -**Example:** - -```python -from dapi.systems import list_system_queues - -queues = list_system_queues(client, "frontera") -# Fetching queue information for system 'frontera'... -# Found 3 batch logical queues for system 'frontera': -# - Name: normal (HPC Queue: normal, Max Jobs: 50, ...) -# - Name: development (HPC Queue: development, Max Jobs: 1, ...) - -# Using DSClient: -queues = ds.systems.queues("frontera") -``` - ---- - -## TMS Credential Management - -Manage Tapis Managed Secrets (TMS) credentials on execution systems. TMS credentials are SSH key pairs that allow Tapis to access TACC systems (Frontera, Stampede3, Lonestar6) on behalf of a user. They must be established once per system before submitting jobs. - -### `check_credentials(t, system_id, username=None)` - -Check whether TMS credentials exist for a user on a Tapis system. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `system_id` (`str`): The ID of the Tapis system (e.g., `"frontera"`, `"stampede3"`). -- `username` (`Optional[str]`, optional): The username to check. If `None`, auto-detected from `t.username`. Defaults to `None`. - -**Returns:** `bool` -- `True` if credentials exist, `False` if they do not. - -**Raises:** - -- `ValueError`: If `system_id` is empty or username cannot be determined. -- `CredentialError`: If an unexpected API error occurs during the check. - -**Example:** - -```python -from dapi.systems import check_credentials - -has_creds = check_credentials(client, "frontera") -print(has_creds) # True or False - -# Using DSClient: -has_creds = ds.systems.check_credentials("frontera") -``` - ---- - -### `establish_credentials(t, system_id, username=None, force=False, verbose=True)` - -Establish TMS credentials for a user on a Tapis system. - -Idempotent: if credentials already exist and `force` is `False`, no action is taken. 
-Only systems with `defaultAuthnMethod` set to `"TMS_KEYS"` are supported. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `system_id` (`str`): The ID of the Tapis system (e.g., `"frontera"`, `"stampede3"`). -- `username` (`Optional[str]`, optional): The username. If `None`, auto-detected from `t.username`. Defaults to `None`. -- `force` (`bool`, optional): If `True`, create credentials even if they already exist. Defaults to `False`. -- `verbose` (`bool`, optional): If `True`, prints status messages. Defaults to `True`. - -**Raises:** - -- `ValueError`: If `system_id` is empty or username cannot be determined. -- `CredentialError`: If the system does not use `TMS_KEYS`, if the system is not found, or if credential creation fails. - -**Example:** - -```python -from dapi.systems import establish_credentials - -establish_credentials(client, "frontera") -# TMS credentials established for user 'myuser' on system 'frontera'. - -# Force re-creation: -establish_credentials(client, "frontera", force=True) - -# Using DSClient: -ds.systems.establish_credentials("frontera") -``` - ---- - -### `revoke_credentials(t, system_id, username=None, verbose=True)` - -Remove TMS credentials for a user on a Tapis system. - -Idempotent: if credentials do not exist, no error is raised. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `system_id` (`str`): The ID of the Tapis system (e.g., `"frontera"`, `"stampede3"`). -- `username` (`Optional[str]`, optional): The username. If `None`, auto-detected from `t.username`. Defaults to `None`. -- `verbose` (`bool`, optional): If `True`, prints status messages. Defaults to `True`. - -**Raises:** - -- `ValueError`: If `system_id` is empty or username cannot be determined. -- `CredentialError`: If credential removal fails unexpectedly. 
- -**Example:** - -```python -from dapi.systems import revoke_credentials - -revoke_credentials(client, "frontera") -# Credentials revoked for user 'myuser' on system 'frontera'. - -# Using DSClient: -ds.systems.revoke_credentials("frontera") -``` - ---- - -### `setup_tms_credentials(t, systems=None)` - -Check and establish TMS credentials on execution systems. - -For each system, checks if credentials exist and creates them if missing. -Failures are handled gracefully -- a system that cannot be reached or where -the user lacks an allocation is skipped with a warning. - -This function is called automatically during `DSClient` initialization for the -default TACC systems. - -**Args:** - -- `t` (`Tapis`): Authenticated Tapis client instance. -- `systems` (`Optional[List[str]]`, optional): List of system IDs to set up. Defaults to `TACC_SYSTEMS` (`["frontera", "stampede3", "ls6"]`). - -**Returns:** `Dict[str, str]` -- A dictionary mapping each `system_id` to its status: `"ready"` (credentials already existed), `"created"` (newly established), or `"skipped"` (system unreachable or not TMS_KEYS). - -**Example:** - -```python -from dapi.systems import setup_tms_credentials - -results = setup_tms_credentials(client) -# TMS credentials ready: frontera, stampede3 (newly created: stampede3) -# TMS credentials skipped: ls6 -print(results) -# {'frontera': 'ready', 'stampede3': 'created', 'ls6': 'skipped'} - -# With custom system list: -results = setup_tms_credentials(client, systems=["frontera"]) -``` diff --git a/docs/authentication.md b/docs/authentication.md index 7dc3438..6e987b7 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -1,51 +1,28 @@ # Authentication -This guide explains how to authenticate with DesignSafe using the dapi library. Authentication is required to access DesignSafe resources and submit jobs. +dapi authenticates with DesignSafe via the TAPIS v3 API. Credentials are resolved in this order: -## Overview +1. 
Explicit parameters passed to `DSClient()` +2. Environment variables (`DESIGNSAFE_USERNAME`, `DESIGNSAFE_PASSWORD`) +3. `.env` file in your project directory +4. Interactive prompts -dapi uses your DesignSafe credentials to authenticate with the TAPIS v3 API. The library supports multiple methods for providing credentials, following a secure credential resolution hierarchy. - -## Credential Resolution Hierarchy - -dapi looks for credentials in the following order: - -1. **Explicit parameters** passed to `DSClient()` -2. **Environment variables** (`DESIGNSAFE_USERNAME`, `DESIGNSAFE_PASSWORD`) -3. **`.env` file** in your project directory -4. **Interactive prompts** for missing credentials - -## Authentication Methods - -### Method 1: Environment Variables (Recommended) - -Set environment variables in your shell: +## Environment Variables ```bash export DESIGNSAFE_USERNAME="your_username" export DESIGNSAFE_PASSWORD="your_password" ``` -Then initialize the client: - ```python from dapi import DSClient -# Automatically uses environment variables -ds = DSClient() +ds = DSClient() # uses environment variables ``` -#### Persistent Environment Variables +To persist across sessions, add the exports to `~/.bashrc` or `~/.zshrc`. -Add to your shell configuration file (`~/.bashrc`, `~/.zshrc`, etc.): - -```bash -# Add these lines to your shell config -export DESIGNSAFE_USERNAME="your_username" -export DESIGNSAFE_PASSWORD="your_password" -``` - -### Method 2: .env File (Recommended for Projects) +## .env File Create a `.env` file in your project root: @@ -56,147 +33,51 @@ DESIGNSAFE_PASSWORD=your_password ``` :::{warning} Security Note -Never commit `.env` files to version control. Add `.env` to your `.gitignore` file. +Never commit `.env` files to version control. Add `.env` to your `.gitignore`. 
::: -Initialize the client: - ```python from dapi import DSClient -# Automatically loads from .env file -ds = DSClient() +ds = DSClient() # loads from .env -# Or specify a custom .env file path +# Or specify a custom path ds = DSClient(env_file="path/to/custom.env") ``` -### Method 3: Interactive Prompts +## Interactive Prompts -If no credentials are found, dapi will prompt you: +If no credentials are found, dapi prompts for them: ```python -from dapi import DSClient - ds = DSClient() -# Output: # Enter DesignSafe Username: your_username # Enter DesignSafe Password: [hidden input] # Authentication successful. ``` -### Method 4: Explicit Parameters - -Pass credentials directly (not recommended for production): +## Explicit Parameters ```python -from dapi import DSClient - ds = DSClient( username="your_username", password="your_password" ) ``` -## Security Best Practices - -### 1. Use Environment Variables or .env Files -```python -# Good - uses environment variables -ds = DSClient() - -# Avoid - credentials in code -ds = DSClient(username="user", password="pass") -``` - -### 2. Protect Your .env File -```bash -# Add to .gitignore -echo ".env" >> .gitignore - -# Set restrictive permissions (Unix/Linux/macOS) -chmod 600 .env -``` - -### 3. Use Strong Passwords -- Use your DesignSafe account password -- Enable two-factor authentication on your DesignSafe account - -### 4. 
Rotate Credentials Regularly -- Change your DesignSafe password periodically -- Update stored credentials when changed - -## DesignSafe Jupyter Environment - -### Setting Environment Variables in Jupyter - -```python -import os - -# Set for current session -os.environ['DESIGNSAFE_USERNAME'] = 'your_username' -os.environ['DESIGNSAFE_PASSWORD'] = 'your_password' - -from dapi import DSClient -ds = DSClient() -``` - -### Using .env Files in Jupyter - -Create a `.env` file in your notebook directory: - -```python -# Create .env file programmatically -with open('.env', 'w') as f: - f.write('DESIGNSAFE_USERNAME=your_username\n') - f.write('DESIGNSAFE_PASSWORD=your_password\n') - -from dapi import DSClient -ds = DSClient() -``` - -## Advanced Configuration - -### Custom Base URL - -```python -from dapi import DSClient - -ds = DSClient( - base_url="https://designsafe.tapis.io", # Default - username="your_username", - password="your_password" -) -``` - -### Multiple .env Files - -```python -from dapi import DSClient - -# Development environment -dev_ds = DSClient(env_file=".env.development") - -# Production environment -prod_ds = DSClient(env_file=".env.production") -``` - ## TMS Credentials (Execution System Access) -After authenticating with DesignSafe, you also need **TMS credentials** on any execution system where you plan to submit jobs. TMS (Trust Management System) manages SSH key pairs that allow Tapis to access TACC systems (Frontera, Stampede3, Lonestar6) on your behalf. +After authenticating with DesignSafe, you need TMS credentials on execution systems where you plan to submit jobs. TMS manages SSH key pairs that allow Tapis to access TACC systems (Frontera, Stampede3, Lonestar6) on your behalf. :::{note} One-time setup -TMS credentials only need to be established **once per system**. After that, they persist until you revoke them. +TMS credentials only need to be established once per system. After that, they persist until you revoke them. 
::: ### Establish Credentials ```python -from dapi import DSClient - ds = DSClient() -# Establish TMS credentials on execution systems ds.systems.establish_credentials("frontera") ds.systems.establish_credentials("stampede3") ds.systems.establish_credentials("ls6") @@ -211,7 +92,6 @@ ds.systems.establish_credentials("frontera", force=True) ### Check Credentials ```python -# Check if credentials exist before submitting a job if ds.systems.check_credentials("frontera"): print("Ready to submit jobs on Frontera") else: @@ -221,24 +101,16 @@ else: ### Revoke Credentials ```python -# Remove credentials (e.g., to reset keys) ds.systems.revoke_credentials("frontera") ``` ### Using TMS from Outside DesignSafe -TMS credentials work from any environment -- not just DesignSafe JupyterHub. As long as you can authenticate with Tapis (e.g., via `.env` file), you can establish and manage TMS credentials from your laptop, CI/CD pipelines, or any Python script: - -```bash -# .env file -DESIGNSAFE_USERNAME=your_username -DESIGNSAFE_PASSWORD=your_password -``` +TMS credentials work from any environment -- not just DesignSafe JupyterHub. As long as you can authenticate with Tapis (e.g., via `.env` file), you can manage TMS credentials from your laptop, CI/CD pipelines, or any Python script: ```python from dapi import DSClient -# Works from anywhere with network access to designsafe.tapis.io ds = DSClient() ds.systems.establish_credentials("frontera") @@ -249,97 +121,69 @@ job = ds.jobs.submit(job_request) ### Troubleshooting TMS -#### Non-TMS System +**Non-TMS System:** ``` CredentialError: System 'my-system' uses authentication method 'PASSWORD', not 'TMS_KEYS'. ``` -**Solution**: TMS credential management only works for systems configured with `TMS_KEYS` authentication. TACC execution systems (frontera, stampede3, ls6) use TMS_KEYS. +TMS credential management only works for systems configured with `TMS_KEYS` authentication. 
TACC execution systems (frontera, stampede3, ls6) use TMS_KEYS. -#### System Not Found +**System Not Found:** ``` CredentialError: System 'nonexistent' not found. ``` -**Solution**: Verify the system ID. Common system IDs: `frontera`, `stampede3`, `ls6`. +Verify the system ID. Common system IDs: `frontera`, `stampede3`, `ls6`. -## Verifying Authentication +## Database Connections -### Check Authentication Status +Database connections use built-in public read-only credentials by default -- no `.env` setup is required. To override the defaults (e.g., for a private database instance), set environment variables: -```python -from dapi import DSClient +```bash +# Optional: override database credentials +NGL_DB_USER=your_user +NGL_DB_PASSWORD=your_password +NGL_DB_HOST=your_host +NGL_DB_PORT=3306 +``` + +The same pattern applies for VP (`VP_DB_*`) and Earthquake Recovery (`EQ_DB_*`) databases. -try: - ds = DSClient() - print("Authentication successful!") +## JWT Token Expiration - # Test API access - apps = ds.apps.find("", verbose=False) - print(f"API access confirmed. Found {len(apps)} apps.") - -except Exception as e: - print(f"Authentication failed: {e}") +Long-running sessions may encounter token expiration: + +``` +UnauthorizedError: message: b'TAPIS_SECURITY_JWT_EXPIRED ...' ``` -### Test Database Access +Reinitialize your client to refresh tokens: ```python -# Test database authentication -try: - df = ds.db.ngl.read_sql("SELECT COUNT(*) FROM SITE") - print("Database access confirmed") -except Exception as e: - print(f"Database access failed: {e}") +ds = DSClient() ``` -## Troubleshooting +Tapis tokens have a limited lifespan. Long-running notebooks or scripts will hit this after several hours. -### Common Authentication Issues +## Troubleshooting -#### Invalid Credentials +**Invalid credentials:** ``` AuthenticationError: Tapis authentication failed ``` -**Solution**: Verify your DesignSafe username and password +Verify your DesignSafe username and password. 
-#### Network Issues +**Network issues:** ``` AuthenticationError: An unexpected error occurred during authentication ``` -**Solution**: Check internet connection and DesignSafe service status +Check your internet connection and DesignSafe service status. -#### Environment Variable Issues -``` -Enter DesignSafe Username: -``` -**Solution**: Verify environment variables are set correctly +**Environment variables not detected:** ```bash echo $DESIGNSAFE_USERNAME echo $DESIGNSAFE_PASSWORD ``` -#### .env File Not Found -```python -# Verify .env file exists and is readable -import os -print(os.path.exists('.env')) -print(os.access('.env', os.R_OK)) -``` - -### Database Connection Issues - -Database connections use built-in public read-only credentials by default -- no `.env` setup is required for database access. If you need to override the defaults (e.g., for a private database instance), you can set environment variables: - -```bash -# Optional: override database credentials via .env or environment -NGL_DB_USER=your_user -NGL_DB_PASSWORD=your_password -NGL_DB_HOST=your_host -NGL_DB_PORT=3306 -``` - -The same pattern applies for VP (`VP_DB_*`) and Earthquake Recovery (`EQ_DB_*`) databases. - -## Example: Complete Setup +## Complete Setup Example ```python # 1. Create .env file (only Tapis credentials required) @@ -350,42 +194,4 @@ with open('.env', 'w') as f: # 2. Initialize client (auto-sets up TMS credentials) from dapi import DSClient ds = DSClient() - -# 3. 
Test -apps = ds.apps.find("matlab", verbose=False) -print(f"Found {len(apps)} MATLAB apps") - -# Database works out of the box -- no extra credentials needed -df = ds.db.ngl.read_sql("SELECT COUNT(*) FROM SITE") -print(f"NGL database has {df.iloc[0, 0]} sites") -``` - -## Troubleshooting - -### JWT Token Expiration - -If you encounter JWT token expiration errors during long-running sessions, you'll see an error like: - -``` -UnauthorizedError: message: b'TAPIS_SECURITY_JWT_EXPIRED Exception message: JWT expired at 2025-06-09T08:51:38Z. Current time: 2025-06-09T12:06:54Z, a difference of 11716617 milliseconds. Allowed clock skew: 0 milliseconds. Claims: iss: https://designsafe.tapis.io/v3/tokens sub: username@designsafe tapis/tenant_id: designsafe tapis/username: username tapis/account_type: user' -``` - -**Solution:** Simply reinitialize your DSClient to refresh the authentication tokens: - -```python -# Reinitialize the client to refresh tokens -ds = DSClient() ``` - -This will automatically handle token refresh and you can continue with your work. - -**Why this happens:** Tapis authentication tokens have a limited lifespan for security purposes. Long-running Jupyter notebooks or scripts may encounter this after several hours of use. - -## Next Steps - -After setting up authentication: - -1. **[Try the quick start guide](quickstart.md)** for your first workflow -2. **[Submit your first job](jobs.md)** using the jobs interface -3. **[Query databases](database.md)** for research data -4. **[Explore examples](examples/mpm.md)** for detailed workflows \ No newline at end of file diff --git a/docs/database.md b/docs/database.md index 7372111..f149a8c 100644 --- a/docs/database.md +++ b/docs/database.md @@ -1,42 +1,35 @@ # Database Access -This guide covers how to access and query DesignSafe research databases using dapi. DesignSafe provides access to several important research databases for earthquake engineering, geotechnical engineering, and natural hazards research. 
+dapi connects to three DesignSafe research databases: -## Available Databases - -dapi provides access to three major research databases: - -| Database | Code | Description | Domain | -|----------|------|-------------|---------| -| **NGL** | `ngl` | Next Generation Liquefaction database | Geotechnical/Liquefaction | -| **Earthquake Recovery** | `eq` | Post-earthquake recovery database | Social/Economic impacts | -| **VP** | `vp` | Validation Portal database | Model validation | +| Database | Code | Domain | +|----------|------|--------| +| NGL | `ngl` | Geotechnical / Liquefaction | +| Earthquake Recovery | `eq` | Social / Economic impacts | +| VP | `vp` | Model validation | ## Quick Start ```python from dapi import DSClient -# Initialize client ds = DSClient() -# Query NGL database df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") print(df) -# Query with parameters +# Parameterized query site_data = ds.db.ngl.read_sql( "SELECT * FROM SITE WHERE SITE_NAME = %s", params=["Amagasaki"] ) -print(site_data) ``` -## Database Authentication +## Authentication -### Environment Variables +Database connections use built-in public read-only credentials by default -- no setup required. -Database access requires additional authentication. Set these environment variables: +To override (e.g., for a private database instance), set environment variables: ```bash # NGL Database @@ -44,91 +37,44 @@ export NGL_DB_USER="dspublic" export NGL_DB_PASSWORD="your_password" export NGL_DB_HOST="database_host" export NGL_DB_PORT="3306" - -# VP Database -export VP_DB_USER="dspublic" -export VP_DB_PASSWORD="your_password" -export VP_DB_HOST="database_host" -export VP_DB_PORT="3306" - -# Earthquake Recovery Database -export EQ_DB_USER="dspublic" -export EQ_DB_PASSWORD="your_password" -export EQ_DB_HOST="database_host" -export EQ_DB_PORT="3306" ``` -### Using .env Files - -Create a `.env` file in your project: +Same pattern for VP (`VP_DB_*`) and Earthquake Recovery (`EQ_DB_*`). 
-```bash
-# .env file
-DESIGNSAFE_USERNAME=your_username
-DESIGNSAFE_PASSWORD=your_password
-
-# Database credentials
-NGL_DB_USER=dspublic
-NGL_DB_PASSWORD=your_db_password
-NGL_DB_HOST=database_host
-NGL_DB_PORT=3306
-
-VP_DB_USER=dspublic
-VP_DB_PASSWORD=your_db_password
-VP_DB_HOST=database_host
-VP_DB_PORT=3306
-
-EQ_DB_USER=dspublic
-EQ_DB_PASSWORD=your_db_password
-EQ_DB_HOST=database_host
-EQ_DB_PORT=3306
-```
+## Querying
 
-## Basic Querying
-
-### Simple Queries
+### Basic Queries
 
 ```python
-from dapi import DSClient
-
-ds = DSClient()
-
-# Count records in NGL database
 count_df = ds.db.ngl.read_sql("SELECT COUNT(*) as total_sites FROM SITE")
 print(f"Total sites: {count_df['total_sites'].iloc[0]}")
 
-# Get first 10 sites
 sites_df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10")
-print(sites_df)
 
-# Get site information
 site_info = ds.db.ngl.read_sql("""
-    SELECT SITE_NAME, SITE_LAT, SITE_LON, SITE_GEOL
-    FROM SITE
-    WHERE SITE_LAT > 35
+    SELECT SITE_NAME, SITE_LAT, SITE_LON, SITE_GEOL
+    FROM SITE
+    WHERE SITE_LAT > 35
     ORDER BY SITE_NAME
 """)
-print(site_info)
 ```
 
 ### Parameterized Queries
 
 ```python
-# Query with single parameter
-site_name = "Amagasaki"
+# Single parameter
+site_name = "Amagasaki"
 site_data = ds.db.ngl.read_sql(
     "SELECT * FROM SITE WHERE SITE_NAME = %s",
     params=[site_name]
 )
 
-# Query with multiple parameters
-min_lat, max_lat = 32.0, 38.0
+# Multiple parameters
 california_sites = ds.db.ngl.read_sql(
     "SELECT * FROM SITE WHERE SITE_LAT BETWEEN %s AND %s",
-    params=[min_lat, max_lat]
+    params=[32.0, 38.0]
 )
 
-# Query with named parameters (dictionary)
+# Named parameters
 region_sites = ds.db.ngl.read_sql(
     "SELECT * FROM SITE WHERE SITE_LAT > %(min_lat)s AND SITE_LON < %(max_lon)s",
     params={"min_lat": 35.0, "max_lon": -115.0}
@@ -137,30 +83,24 @@ region_sites = ds.db.ngl.read_sql(
 
 ## NGL Database (Next Generation Liquefaction)
 
-The NGL database contains comprehensive data on soil liquefaction case histories.
- -### Key Tables +### Exploring Tables ```python -# Explore database structure tables_info = ds.db.ngl.read_sql("SHOW TABLES") -print("Available tables:") print(tables_info) -# Get table structure site_structure = ds.db.ngl.read_sql("DESCRIBE SITE") -print("SITE table structure:") print(site_structure) ``` -### Common NGL Queries +### Example Queries ```python -# Site information +# Active sites sites = ds.db.ngl.read_sql(""" SELECT SITE_ID, SITE_NAME, SITE_LAT, SITE_LON, SITE_GEOL FROM SITE - WHERE SITE_STAT = 1 -- Active sites only + WHERE SITE_STAT = 1 ORDER BY SITE_NAME """) @@ -173,7 +113,7 @@ liquefaction_sites = ds.db.ngl.read_sql(""" ORDER BY s.SITE_NAME """) -# Earthquake events +# Recent earthquakes earthquakes = ds.db.ngl.read_sql(""" SELECT DISTINCT EVENT_NAME, EVENT_DATE, EVENT_MAG FROM EVENT @@ -184,7 +124,7 @@ earthquakes = ds.db.ngl.read_sql(""" # CPT data summary cpt_summary = ds.db.ngl.read_sql(""" - SELECT + SELECT COUNT(*) as total_cpts, AVG(CPT_DEPTH) as avg_depth, MIN(CPT_DEPTH) as min_depth, @@ -194,12 +134,12 @@ cpt_summary = ds.db.ngl.read_sql(""" """) ``` -### Advanced NGL Analysis +### Joins ```python -# Sites with high liquefaction potential +# Sites with multiple liquefaction events high_risk_sites = ds.db.ngl.read_sql(""" - SELECT + SELECT s.SITE_NAME, s.SITE_LAT, s.SITE_LON, @@ -214,37 +154,13 @@ high_risk_sites = ds.db.ngl.read_sql(""" HAVING liquefaction_events > 2 ORDER BY liquefaction_events DESC, avg_magnitude DESC """) - -# Correlation between soil properties and liquefaction -soil_correlation = ds.db.ngl.read_sql(""" - SELECT - cpt.CPT_FC as fines_content, - cpt.CPT_D50 as median_grain_size, - COUNT(l.LIQ_ID) as liquefaction_cases, - AVG(e.EVENT_MAG) as avg_magnitude - FROM CPT cpt - JOIN RECORD r ON cpt.RECORD_ID = r.RECORD_ID - LEFT JOIN LIQUEFACTION l ON r.RECORD_ID = l.RECORD_ID - JOIN EVENT e ON r.EVENT_ID = e.EVENT_ID - WHERE cpt.CPT_STAT = 1 AND r.RECORD_STAT = 1 - AND cpt.CPT_FC IS NOT NULL AND cpt.CPT_D50 IS NOT NULL 
- GROUP BY - ROUND(cpt.CPT_FC, 1), - ROUND(cpt.CPT_D50, 2) - ORDER BY fines_content, median_grain_size -""") ``` ## Earthquake Recovery Database -The earthquake recovery database contains data on post-earthquake recovery processes. - -### Common EQ Queries - ```python -# Recovery milestones recovery_data = ds.db.eq.read_sql(""" - SELECT + SELECT event_name, recovery_metric, recovery_time_days, @@ -253,30 +169,13 @@ recovery_data = ds.db.eq.read_sql(""" WHERE recovery_time_days IS NOT NULL ORDER BY event_name, recovery_time_days """) - -# Economic impact analysis -economic_impact = ds.db.eq.read_sql(""" - SELECT - region, - AVG(economic_loss_millions) as avg_loss, - SUM(displaced_households) as total_displaced, - COUNT(*) as num_events - FROM economic_impacts - GROUP BY region - ORDER BY avg_loss DESC -""") ``` ## VP Database (Validation Portal) -The VP database contains model validation data and benchmarks. - -### Common VP Queries - ```python -# Model performance metrics model_performance = ds.db.vp.read_sql(""" - SELECT + SELECT model_name, benchmark_case, rmse_error, @@ -286,207 +185,65 @@ model_performance = ds.db.vp.read_sql(""" WHERE validation_score IS NOT NULL ORDER BY validation_score DESC """) - -# Benchmark cases -benchmarks = ds.db.vp.read_sql(""" - SELECT - benchmark_id, - benchmark_name, - category, - difficulty_level, - num_participants - FROM benchmarks - ORDER BY category, difficulty_level -""") -``` - -## Data Analysis Patterns - -### Statistical Analysis - -```python -import pandas as pd -import matplotlib.pyplot as plt - -# Get site data for analysis -sites_df = ds.db.ngl.read_sql(""" - SELECT SITE_LAT, SITE_LON, SITE_GEOL - FROM SITE - WHERE SITE_STAT = 1 AND SITE_LAT IS NOT NULL -""") - -# Basic statistics -print("Site distribution by geology:") -geology_counts = sites_df['SITE_GEOL'].value_counts() -print(geology_counts) - -# Geographic distribution -print(f"Latitude range: {sites_df['SITE_LAT'].min():.2f} to 
{sites_df['SITE_LAT'].max():.2f}") -print(f"Longitude range: {sites_df['SITE_LON'].min():.2f} to {sites_df['SITE_LON'].max():.2f}") - -# Export for further analysis -sites_df.to_csv("ngl_sites.csv", index=False) ``` -### Time Series Analysis +## Export ```python -# Earthquake timeline -earthquake_timeline = ds.db.ngl.read_sql(""" - SELECT - EVENT_DATE, - EVENT_NAME, - EVENT_MAG, - COUNT(r.RECORD_ID) as num_records - FROM EVENT e - LEFT JOIN RECORD r ON e.EVENT_ID = r.EVENT_ID - WHERE e.EVENT_STAT = 1 AND e.EVENT_DATE IS NOT NULL - GROUP BY e.EVENT_ID - ORDER BY e.EVENT_DATE +df = ds.db.ngl.read_sql(""" + SELECT s.SITE_NAME, s.SITE_LAT, s.SITE_LON, e.EVENT_NAME, e.EVENT_MAG + FROM SITE s + JOIN RECORD r ON s.SITE_ID = r.SITE_ID + JOIN EVENT e ON r.EVENT_ID = e.EVENT_ID + WHERE s.SITE_STAT = 1 AND r.RECORD_STAT = 1 """) -# Convert date column -earthquake_timeline['EVENT_DATE'] = pd.to_datetime(earthquake_timeline['EVENT_DATE']) - -# Analyze earthquake frequency by decade -earthquake_timeline['decade'] = (earthquake_timeline['EVENT_DATE'].dt.year // 10) * 10 -decade_summary = earthquake_timeline.groupby('decade').agg({ - 'EVENT_NAME': 'count', - 'EVENT_MAG': 'mean', - 'num_records': 'sum' -}).rename(columns={'EVENT_NAME': 'earthquake_count'}) - -print("Earthquake data by decade:") -print(decade_summary) -``` - -### Geospatial Analysis +df.to_csv("ngl_data.csv", index=False) +df.to_excel("ngl_data.xlsx", index=False) +df.to_json("ngl_data.json", orient="records") -```python -# Sites by geographic region -regional_analysis = ds.db.ngl.read_sql(""" - SELECT - CASE - WHEN SITE_LAT > 40 THEN 'Northern' - WHEN SITE_LAT > 35 THEN 'Central' - ELSE 'Southern' - END as region, - CASE - WHEN SITE_LON > -100 THEN 'Eastern' - WHEN SITE_LON > -120 THEN 'Central' - ELSE 'Western' - END as longitude_zone, - COUNT(*) as site_count, - AVG(SITE_LAT) as avg_latitude, - AVG(SITE_LON) as avg_longitude - FROM SITE - WHERE SITE_STAT = 1 AND SITE_LAT IS NOT NULL AND SITE_LON IS NOT NULL - 
GROUP BY region, longitude_zone - ORDER BY region, longitude_zone -""") +# GeoJSON (requires geopandas) +import geopandas as gpd +from shapely.geometry import Point -print("Geographic distribution of sites:") -print(regional_analysis) +geometry = [Point(xy) for xy in zip(df['SITE_LON'], df['SITE_LAT'])] +gdf = gpd.GeoDataFrame(df, geometry=geometry) +gdf.to_file("ngl_sites.geojson", driver="GeoJSON") ``` ## Connection Management -### Manual Connection Handling - ```python -# Access database connection directly ngl_db = ds.db.ngl -# Check connection status +# Check connection try: test_query = ngl_db.read_sql("SELECT 1 as test") - print("Database connection active") + print("Connection active") except Exception as e: - print(f"Database connection failed: {e}") + print(f"Connection failed: {e}") -# Close connections when done (optional - handled automatically) +# Close (optional -- handled automatically) ngl_db.close() ``` -### Connection Pooling - -```python -# dapi automatically manages connection pooling -# Multiple queries reuse connections efficiently - -queries = [ - "SELECT COUNT(*) FROM SITE", - "SELECT COUNT(*) FROM RECORD", - "SELECT COUNT(*) FROM EVENT" -] - -for query in queries: - result = ds.db.ngl.read_sql(query) - print(f"{query}: {result.iloc[0, 0]}") -``` - -## Error Handling - -### Database Connection Errors - -```python -try: - df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") - print("Query successful") -except Exception as e: - print(f"Database error: {e}") - - # Check environment variables - import os - required_vars = ['NGL_DB_USER', 'NGL_DB_PASSWORD', 'NGL_DB_HOST', 'NGL_DB_PORT'] - missing_vars = [var for var in required_vars if not os.getenv(var)] - - if missing_vars: - print(f"Missing environment variables: {missing_vars}") - else: - print("Environment variables are set, check database credentials") -``` - -### SQL Query Errors - -```python -try: - # Intentionally bad query - df = ds.db.ngl.read_sql("SELECT * FROM NONEXISTENT_TABLE") 
-except Exception as e: - print(f"SQL Error: {e}") - - # Provide helpful debugging - print("Tips:") - print("- Check table name spelling") - print("- Verify table exists: SHOW TABLES") - print("- Check column names: DESCRIBE table_name") -``` - ## Best Practices -### 1. Use Parameterized Queries +### Use Parameterized Queries ```python -# Good - prevents SQL injection -safe_query = ds.db.ngl.read_sql( - "SELECT * FROM SITE WHERE SITE_NAME = %s", - params=[user_input] -) +# Safe +ds.db.ngl.read_sql("SELECT * FROM SITE WHERE SITE_NAME = %s", params=[user_input]) -# Dangerous - vulnerable to SQL injection -dangerous_query = ds.db.ngl.read_sql( - f"SELECT * FROM SITE WHERE SITE_NAME = '{user_input}'" -) +# Unsafe -- SQL injection risk +ds.db.ngl.read_sql(f"SELECT * FROM SITE WHERE SITE_NAME = '{user_input}'") ``` -### 2. Limit Result Sets +### Limit Result Sets ```python -# Good - use LIMIT for large tables -limited_query = ds.db.ngl.read_sql( - "SELECT * FROM LARGE_TABLE LIMIT 1000" -) +# Use LIMIT for large tables +ds.db.ngl.read_sql("SELECT * FROM LARGE_TABLE LIMIT 1000") -# Better - use pagination for very large datasets +# Pagination for very large datasets offset = 0 batch_size = 1000 while True: @@ -496,123 +253,21 @@ while True: ) if batch.empty: break - # Process batch offset += batch_size ``` -### 3. Efficient Joins -```python -# Good - use indexes and appropriate joins -efficient_query = ds.db.ngl.read_sql(""" - SELECT s.SITE_NAME, COUNT(r.RECORD_ID) as record_count - FROM SITE s - LEFT JOIN RECORD r ON s.SITE_ID = r.SITE_ID - WHERE s.SITE_STAT = 1 - GROUP BY s.SITE_ID, s.SITE_NAME - ORDER BY record_count DESC - LIMIT 50 -""") -``` - -### 4. 
Data Validation -```python -# Good - validate data before analysis -df = ds.db.ngl.read_sql("SELECT SITE_LAT, SITE_LON FROM SITE") - -# Check for missing values -missing_coords = df.isnull().sum() -print(f"Missing coordinates: {missing_coords}") - -# Remove invalid coordinates -valid_coords = df.dropna() -valid_coords = valid_coords[ - (valid_coords['SITE_LAT'].between(-90, 90)) & - (valid_coords['SITE_LON'].between(-180, 180)) -] -print(f"Valid coordinates: {len(valid_coords)}/{len(df)}") -``` - -## Export and Integration - -### Export to Different Formats +## Error Handling ```python -# Query data -df = ds.db.ngl.read_sql(""" - SELECT s.SITE_NAME, s.SITE_LAT, s.SITE_LON, e.EVENT_NAME, e.EVENT_MAG - FROM SITE s - JOIN RECORD r ON s.SITE_ID = r.SITE_ID - JOIN EVENT e ON r.EVENT_ID = e.EVENT_ID - WHERE s.SITE_STAT = 1 AND r.RECORD_STAT = 1 -""") - -# Export to various formats -df.to_csv("ngl_data.csv", index=False) -df.to_excel("ngl_data.xlsx", index=False) -df.to_json("ngl_data.json", orient="records") - -# Export to GIS formats (requires geopandas) try: - import geopandas as gpd - from shapely.geometry import Point - - # Create GeoDataFrame - geometry = [Point(xy) for xy in zip(df['SITE_LON'], df['SITE_LAT'])] - gdf = gpd.GeoDataFrame(df, geometry=geometry) - gdf.to_file("ngl_sites.geojson", driver="GeoJSON") - print("Exported to GeoJSON") -except ImportError: - print("Install geopandas for GIS export: pip install geopandas") -``` - -### Integration with Analysis Tools - -```python -# Prepare data for machine learning -from sklearn.preprocessing import StandardScaler -from sklearn.model_selection import train_test_split - -# Get numeric features -features_df = ds.db.ngl.read_sql(""" - SELECT - cpt.CPT_DEPTH, - cpt.CPT_QC, - cpt.CPT_FS, - cpt.CPT_FC, - e.EVENT_MAG, - CASE WHEN l.LIQ_ID IS NOT NULL THEN 1 ELSE 0 END as liquefied - FROM CPT cpt - JOIN RECORD r ON cpt.RECORD_ID = r.RECORD_ID - JOIN EVENT e ON r.EVENT_ID = e.EVENT_ID - LEFT JOIN LIQUEFACTION l ON 
r.RECORD_ID = l.RECORD_ID - WHERE cpt.CPT_STAT = 1 AND r.RECORD_STAT = 1 - AND cpt.CPT_DEPTH IS NOT NULL - AND cpt.CPT_QC IS NOT NULL - AND cpt.CPT_FS IS NOT NULL - AND e.EVENT_MAG IS NOT NULL -""") - -# Remove missing values -clean_df = features_df.dropna() - -# Prepare features and target -X = clean_df[['CPT_DEPTH', 'CPT_QC', 'CPT_FS', 'CPT_FC', 'EVENT_MAG']] -y = clean_df['liquefied'] + df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") +except Exception as e: + print(f"Database error: {e}") -# Split and scale -X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) -scaler = StandardScaler() -X_train_scaled = scaler.fit_transform(X_train) -X_test_scaled = scaler.transform(X_test) + import os + required_vars = ['NGL_DB_USER', 'NGL_DB_PASSWORD', 'NGL_DB_HOST', 'NGL_DB_PORT'] + missing_vars = [var for var in required_vars if not os.getenv(var)] -print(f"Training set: {X_train.shape}") -print(f"Test set: {X_test.shape}") -print(f"Liquefaction rate: {y.mean():.3f}") + if missing_vars: + print(f"Missing environment variables: {missing_vars}") ``` - -## Next Steps - -- **[Explore complete examples](examples/database.md)** showing real database workflows -- **Learn about file operations** for data management -- **Check API reference** for detailed method documentation -- **[Review job integration](jobs.md)** for computational workflows with database data \ No newline at end of file diff --git a/docs/examples.md b/docs/examples.md index 56a87c8..1ff259a 100644 --- a/docs/examples.md +++ b/docs/examples.md @@ -1,102 +1,63 @@ # Examples -Welcome to the DAPI examples collection! These comprehensive tutorials demonstrate how to use DAPI for various computational workflows on DesignSafe. Each example includes complete, runnable code that you can try directly in DesignSafe's Jupyter environment. - - -## Ready to Start? - -Choose an example above and click "Try on DesignSafe" to begin your computational research journey! 
- -Each example is self-contained and includes: - -- Complete, runnable code -- Step-by-step explanations -- Real-world applications -- Troubleshooting guides -- Performance optimization tips - -*Happy computing!* - ### Application Management -Discover and manage applications available on DesignSafe. +Discover and manage applications on DesignSafe. [![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/apps.ipynb) -**What you'll learn:** -- Searching for available applications -- Getting detailed app information -- Understanding app parameters and requirements - --- ### Material Point Method (MPM) Jobs -Submit and monitor MPM simulations for large deformation problems. +Submit and monitor MPM simulations. [![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/mpm/mpm-minimal.ipynb) -**What you'll learn:** -- Setting up MPM job parameters -- Submitting jobs with resource optimization -- Monitoring job progress and status -- Downloading and analyzing results - -**[View Full Documentation →](examples/mpm.md)** +[Full documentation](examples/mpm.md) --- ### PyLauncher Parameter Sweeps -Run many independent tasks within a single SLURM allocation using PyLauncher. +Run many independent tasks within a single SLURM allocation. 
[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/pylauncher/pylauncher_sweep.ipynb)
 
-**What you'll learn:**
-- Generating parameter sweep commands from templates
-- Writing PyLauncher task lists and launcher scripts
-- Submitting and monitoring sweep jobs
-
-**[View Full Documentation →](examples/pylauncher.md)**
+[Full documentation](examples/pylauncher.md)
 
 ---
 
 ### OpenSees Structural Analysis
 
-Perform earthquake engineering simulations with OpenSees.
+Earthquake engineering simulations with OpenSees.
 
 [![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/opensees/opensees-mp/OpenSeesMP-dapi.ipynb)
 
-**What you'll learn:**
-- Configuring OpenSees Multi-Free Field Analysis
-- Custom archive management for organized results
-- Advanced postprocessing with response spectra
-- Parametric studies across soil profiles
+[Full documentation](examples/opensees.md)
+
+---
+
+### OpenFOAM CFD
+
+Computational fluid dynamics with OpenFOAM.
 
-**[View Full Documentation →](examples/opensees.md)**
+[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/openfoam/openfoam-minimal.ipynb)
+
+[Full documentation](examples/openfoam.md)
 
 ---
 
 ### Database Queries
-Access and analyze research data from DesignSafe databases.
-[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/db.ipynb)
+Access DesignSafe research databases (NGL, Earthquake Recovery, VP).
-**What you'll learn:** -- Connecting to research databases -- Writing efficient SQL queries -- Data visualization and analysis -- Working with geotechnical datasets +[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/db.ipynb) -**[View Full Documentation →](examples/database.md)** +[Full documentation](examples/database.md) --- -## Getting Started +### TMS Credentials + +Manage SSH credentials on TACC execution systems. -### Prerequisites -- DesignSafe account (free registration at [designsafe-ci.org](https://designsafe-ci.org)) -- Basic Python knowledge -- Familiarity with Jupyter notebooks +[![Try on DesignSafe](https://raw.githubusercontent.com/DesignSafe-CI/dapi/main/DesignSafe-Badge.svg)](https://jupyter.designsafe-ci.org/hub/user-redirect/lab/tree/CommunityData/dapi/tms_credentials.ipynb) -### Quick Start -1. **Click any "Try on DesignSafe" button** above -2. **Log in** to your DesignSafe account -3. **Run the notebook** cell by cell -4. **Modify parameters** to explore different scenarios \ No newline at end of file +[Full documentation](examples/tms_credentials.md) diff --git a/docs/examples/pylauncher.md b/docs/examples/pylauncher.md index c139042..6da2ba4 100644 --- a/docs/examples/pylauncher.md +++ b/docs/examples/pylauncher.md @@ -8,7 +8,7 @@ Run many independent tasks within a single SLURM allocation using [PyLauncher](h - You have many independent serial runs (parameter studies, Monte Carlo, etc.) 
- Each run writes to its own output directory -- You want efficient use of multi-core allocations without MPI +- You want to use multi-core allocations without MPI ## End-to-End Workflow @@ -72,13 +72,13 @@ job.monitor() Two styles are supported for command templates: -**Token style** (default) — bare uppercase placeholders: +**Token style** (default) -- bare uppercase placeholders: ```python "python run.py --mass MASS --length LENGTH" ``` -**Braces style** — for when token names might collide with other text: +**Braces style** -- for when token names might collide with other text: ```python "python run.py --mass {MASS} --length {LENGTH}" @@ -87,7 +87,7 @@ Two styles are supported for command templates: ## OpenSees Example -A realistic parameter sweep for a cantilever pushover analysis: +A parameter sweep for a cantilever pushover analysis: ```python sweep = { @@ -120,12 +120,12 @@ Use TACC environment variables for collision-free output directories: $WORK/sweep_$SLURM_JOB_ID/run_ALPHA_BETA ``` -- `$WORK` — TACC Work filesystem (avoids archiving overhead) -- `$SLURM_JOB_ID` — unique per job submission -- `$LAUNCHER_JID` / `$LAUNCHER_TSK_ID` — unique per PyLauncher task +- `$WORK` -- TACC Work filesystem (avoids archiving overhead) +- `$SLURM_JOB_ID` -- unique per job submission +- `$LAUNCHER_JID` / `$LAUNCHER_TSK_ID` -- unique per PyLauncher task ## Notes -- **PyLauncher is NOT a dapi dependency** — it's pre-installed on TACC compute nodes. dapi only generates the input files. -- **MPI is disabled** — PyLauncher's `ClassicLauncher` runs independent serial tasks. The `designsafe-agnostic-app` already has `isMpi: false`. -- **Works with any app** — OpenSees, Python, MATLAB, Fortran binaries. The task list is just shell commands. +- **PyLauncher is NOT a dapi dependency** -- it's pre-installed on TACC compute nodes. dapi only generates the input files. +- **MPI is disabled** -- PyLauncher's `ClassicLauncher` runs independent serial tasks. 
The `designsafe-agnostic-app` already has `isMpi: false`. +- **Works with any app** -- OpenSees, Python, MATLAB, Fortran binaries. The task list is just shell commands. diff --git a/docs/index.md b/docs/index.md index a91ca7a..7805d44 100644 --- a/docs/index.md +++ b/docs/index.md @@ -4,40 +4,10 @@ [![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/DesignSafe-CI/dapi/blob/main/LICENSE.md) [![PyPI version](https://badge.fury.io/py/dapi.svg)](https://badge.fury.io/py/dapi) -Welcome to the **DesignSafe API (dapi)** documentation! - -`dapi` is a Python library that simplifies the process of submitting, running, and monitoring [TAPIS v3](https://tapis.readthedocs.io/en/latest/) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org). It provides high-level, user-friendly interfaces for working with DesignSafe resources and research databases. +`dapi` is a Python library for submitting, monitoring, and managing [TAPIS v3](https://tapis.readthedocs.io/en/latest/) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org) or the command line. It also provides access to DesignSafe research databases. 
dapi -## Key Features - -### Job Management -- **Simple Job Submission**: Submit computational jobs with minimal configuration -- **Real-time Monitoring**: Track job progress with interactive progress bars -- **Output Management**: Easily access and download job results -- **Application Discovery**: Find and explore available DesignSafe applications - -### Database Access -- **Research Databases**: Connect to DesignSafe research databases (NGL, Earthquake Recovery, VP) -- **SQL Queries**: Execute SQL queries and get results as pandas DataFrames -- **Automatic Connection Management**: Handles database connections and credentials - -### File Operations -- **Path Translation**: Convert DesignSafe paths (/MyData, /projects) to TAPIS URIs -- **File Management**: Upload, download, and list files on DesignSafe storage -- **Path Verification**: Validate that paths exist before using them - -### Authentication & Credentials -- **Simplified Auth**: Easy authentication with DesignSafe credentials -- **Multiple Methods**: Support for environment variables, .env files, and interactive input -- **TMS Credential Management**: Establish, check, and revoke SSH keys on TACC execution systems -- **Secure**: Handles credentials securely with encrypted storage - -## Quick Start - -Get started with dapi in just a few lines: - ```python from dapi import DSClient @@ -61,43 +31,25 @@ df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 10") ## Getting Started -- **[Installation](installation.md)**: Install dapi and set up your environment -- **[Authentication](authentication.md)**: Configure credentials and authentication -- **[Quick Start](quickstart.md)**: Get up and running in 5 minutes +- [Installation](installation.md) +- [Authentication](authentication.md) +- [Quick Start](quickstart.md) ## User Guide -- **[Jobs](jobs.md)**: Submit and monitor computational jobs -- **[Database Access](database.md)**: Query DesignSafe research databases +- [Jobs](jobs.md) -- submit and monitor 
computational jobs +- [Database Access](database.md) -- query DesignSafe research databases ## Examples -- **[MPM Job Submission](examples/mpm.md)**: Material Point Method workflow -- **[Database Queries](examples/database.md)**: Research data analysis examples - -## Use Cases - -### Research Computing -- Submit OpenSees, MATLAB, Python, and other computational jobs -- Monitor job execution with real-time status updates -- Access job outputs and results efficiently - -### Data Analysis -- Query large research databases with SQL -- Analyze earthquake, geotechnical, and structural data -- Export results to pandas DataFrames for further analysis - -### File Management -- Organize and manage research data on DesignSafe -- Transfer files between local machines and DesignSafe storage -- Collaborate on data with project teams +- [MPM Job Submission](examples/mpm.md) +- [PyLauncher Parameter Sweeps](examples/pylauncher.md) +- [Database Queries](examples/database.md) ## Support -- **Issues**: Report bugs or request features on [GitHub Issues](https://github.com/DesignSafe-CI/dapi/issues) -- **Documentation**: Comprehensive guides and API reference -- **Community**: Connect with other users on DesignSafe forums +Report bugs or request features on [GitHub Issues](https://github.com/DesignSafe-CI/dapi/issues). ## License -dapi is licensed under the [MIT License](https://github.com/DesignSafe-CI/dapi/blob/main/LICENSE.md). \ No newline at end of file +MIT License ([view](https://github.com/DesignSafe-CI/dapi/blob/main/LICENSE.md)) diff --git a/docs/installation.md b/docs/installation.md index 721f403..f525cfc 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,59 +1,44 @@ # Installation -This guide will help you install the DesignSafe API (dapi) package and set up your environment. 
- ## Requirements -- **Python**: 3.10 or higher -- **Operating System**: Windows, macOS, or Linux -- **DesignSafe Account**: Required for authentication ([sign up here](https://www.designsafe-ci.org/account/register/)) - -## Installation Methods +- Python 3.10+ +- A DesignSafe account ([register](https://www.designsafe-ci.org/account/register/)) -### Install from PyPI (Recommended) - -The easiest way to install dapi is using pip: +## Install from PyPI ```bash pip install dapi ``` -### Install Latest Development Version - -To get the latest features and bug fixes: +## Install Development Version ```bash pip install git+https://github.com/DesignSafe-CI/dapi.git@dev ``` -### Install for Development - -If you want to contribute to dapi or modify the source code: +## Install for Development ```bash -# Clone the repository git clone https://github.com/DesignSafe-CI/dapi.git cd dapi # Install Poetry (if not already installed) curl -sSL https://install.python-poetry.org | python3 - -# Install dependencies and dapi in editable mode +# Install dependencies virtualenv env && source env/bin/activate poetry install ``` -You can also install an editable local version of dapi +Or install an editable local copy: ``` pip install -e . ``` +## DesignSafe Jupyter Hub -## DesignSafe Jupyter Environment - -### Installing on DesignSafe Jupyter Hub - -If you're using [DesignSafe Jupyter](https://jupyter.designsafe-ci.org/), install dapi in your notebook: +Install dapi in a notebook: ```python # Remove any previous installations (optional) @@ -67,146 +52,47 @@ If you're using [DesignSafe Jupyter](https://jupyter.designsafe-ci.org/), instal ``` :::{tip} Kernel Restart Required -After installing dapi in a Jupyter notebook, you must restart the kernel for the changes to take effect. Go to **Kernel -> Restart Kernel** in the Jupyter menu. +After installing dapi in a Jupyter notebook, restart the kernel for changes to take effect. Go to **Kernel -> Restart Kernel**. 
::: -### Persistent Installation - -For a persistent installation across Jupyter sessions: +For a persistent installation across sessions: ```bash # SSH into your DesignSafe workspace terminal pip install --user dapi ``` -## Dependencies - -dapi automatically installs the following key dependencies: - -- **tapipy**: TAPIS v3 Python SDK -- **pandas**: Data manipulation and analysis -- **sqlalchemy**: Database connections -- **pymysql**: MySQL database connector -- **tqdm**: Progress bars -- **python-dotenv**: Environment variable management - ## Verify Installation -Test your installation by importing dapi: - ```python import dapi -print(f"dapi version: {dapi.__version__}") - -# List available functions -print("Available functions:") -print(dir(dapi)) -``` - -Expected output: -``` -dapi version: 0.4.9 -Available functions: -['DSClient', 'SubmittedJob', 'interpret_job_status', ...] -``` - -## Python Environment Management - -### Using Virtual Environments - -It's recommended to use virtual environments to avoid conflicts: - -```bash -# Create virtual environment -python -m venv dapi-env - -# Activate (Linux/macOS) -source dapi-env/bin/activate - -# Activate (Windows) -dapi-env\Scripts\activate - -# Install dapi -pip install dapi +print(dapi.__version__) ``` -### Using Conda +## Updating ```bash -# Create conda environment -conda create -n dapi-env python=3.10 - -# Activate environment -conda activate dapi-env - -# Install dapi -pip install dapi +pip install --upgrade dapi ``` ## Troubleshooting -### Common Installation Issues - -#### Permission Errors -If you encounter permission errors: +**Permission errors:** ```bash pip install --user dapi ``` -#### SSL Certificate Errors -If you encounter SSL issues: +**SSL certificate errors:** ```bash pip install --trusted-host pypi.org --trusted-host pypi.python.org dapi ``` -#### Version Conflicts -If you have conflicting dependencies: +**Version conflicts:** ```bash pip install dapi --force-reinstall ``` -### 
Platform-Specific Issues - -#### Windows -- Ensure you have Microsoft Visual C++ Build Tools installed -- Use Anaconda/Miniconda for easier dependency management - -#### macOS -- Install Xcode Command Line Tools: `xcode-select --install` -- Consider using Homebrew for Python: `brew install python` - -#### Linux -- Install development packages: `sudo apt-get install python3-dev build-essential` - -## Updating - -### Update to Latest Version -```bash -pip install --upgrade dapi -``` - -### Check Current Version -```python -import dapi -print(dapi.__version__) -``` - -## Getting Help - -If you encounter issues during installation: - -1. **Check the [Issues page](https://github.com/DesignSafe-CI/dapi/issues)** for known problems -2. **Search existing issues** before creating a new one -3. **Provide details** when reporting issues: - - Operating system and version - - Python version - - Complete error messages - - Installation method used - ## Next Steps -After successful installation: - -1. **[Set up authentication](authentication.md)** with your DesignSafe credentials -2. **[Try the quick start guide](quickstart.md)** for your first dapi workflow -3. **[Explore examples](examples/mpm.md)** to see dapi in action \ No newline at end of file +1. [Set up authentication](authentication.md) +2. [Quick start guide](quickstart.md) diff --git a/docs/jobs.md b/docs/jobs.md index d380e13..4e24582 100644 --- a/docs/jobs.md +++ b/docs/jobs.md @@ -1,18 +1,5 @@ # Job Management -This guide covers everything you need to know about submitting, monitoring, and managing computational jobs on DesignSafe using dapi. - -## Overview - -dapi provides a high-level interface for working with TAPIS v3 jobs on DesignSafe. 
You can: - -- **List** past jobs with filtering and search -- **Discover** available applications -- **Generate** job requests with automatic parameter mapping -- **Submit** jobs to DesignSafe compute resources -- **Monitor** job progress with real-time updates -- **Manage** job outputs and results - ## Listing Jobs Browse your job history as a pandas DataFrame with optional filtering. @@ -63,15 +50,9 @@ raw = ds.jobs.list(output="raw") ### Finding Applications ```python -from dapi import DSClient - -ds = DSClient() - -# Find all applications all_apps = ds.apps.find("", verbose=False) print(f"Found {len(all_apps)} applications") -# Search for specific applications matlab_apps = ds.apps.find("matlab", verbose=True) opensees_apps = ds.apps.find("opensees", verbose=True) mpm_apps = ds.apps.find("mpm", verbose=True) @@ -80,18 +61,16 @@ mpm_apps = ds.apps.find("mpm", verbose=True) ### Getting Application Details ```python -# Get detailed information about an application app_details = ds.apps.get_details("mpm-s3", verbose=True) print(f"App: {app_details.id}") print(f"Version: {app_details.version}") -print(f"Description: {app_details.description}") print(f"Execution System: {app_details.jobAttributes.execSystemId}") print(f"Max Runtime: {app_details.jobAttributes.maxMinutes} minutes") print(f"Default Cores: {app_details.jobAttributes.coresPerNode}") ``` -### Popular Applications +### Available Applications | Application | App ID | Description | |-------------|--------|-------------| @@ -103,15 +82,11 @@ print(f"Default Cores: {app_details.jobAttributes.coresPerNode}") | ADCIRC | `adcirc-v55` | Coastal circulation modeling | | LS-DYNA | `ls-dyna` | Explicit finite element analysis | -The **Agnostic App** (`designsafe-agnostic-app`) is DesignSafe's general-purpose app for running Python scripts, OpenSeesPy, and PyLauncher parameter sweeps on TACC systems. 
It supports: -- Python 3.12 with OpenSeesPy pre-installed -- PyLauncher for running many independent tasks in a single allocation -- Configurable TACC module loading -- Serial execution (`isMpi: false`) — ideal for PyLauncher workflows +The Agnostic App (`designsafe-agnostic-app`) runs Python scripts, OpenSeesPy, and PyLauncher parameter sweeps on TACC systems. It includes Python 3.12 with OpenSeesPy pre-installed and supports configurable TACC module loading. It runs in serial mode (`isMpi: false`), which is what PyLauncher workflows need. ## Job Submission -### Basic Job Submission +### Basic Submission ```python # 1. Prepare input directory @@ -132,7 +107,7 @@ job = ds.jobs.submit(job_request) print(f"Job submitted: {job.uuid}") ``` -### Advanced Job Configuration +### Advanced Configuration ```python job_request = ds.jobs.generate( @@ -147,12 +122,12 @@ job_request = ds.jobs.generate( memory_mb=96000, queue="normal", allocation="your_allocation", - + # Job metadata job_name="mpm_parametric_study_001", description="Parametric study of soil behavior under seismic loading", tags=["research", "mpm", "seismic"], - + # Additional file inputs extra_file_inputs=[ { @@ -161,13 +136,13 @@ job_request = ds.jobs.generate( "targetPath": "materials" } ], - + # Environment variables extra_env_vars=[ {"key": "OMP_NUM_THREADS", "value": "48"}, {"key": "ANALYSIS_TYPE", "value": "SEISMIC"} ], - + # Scheduler options extra_scheduler_options=[ {"name": "Email Notification", "arg": "-m be"}, @@ -179,7 +154,6 @@ job_request = ds.jobs.generate( ### Modifying Job Requests ```python -# Generate base request job_request = ds.jobs.generate(...) 
# Modify before submission @@ -199,7 +173,6 @@ job_request["parameterSet"]["envVariables"].append({ "value": "custom_value" }) -# Submit modified request job = ds.jobs.submit(job_request) ``` @@ -208,65 +181,58 @@ job = ds.jobs.submit(job_request) ### Real-time Monitoring ```python -# Submit job job = ds.jobs.submit(job_request) -# Monitor with progress bars final_status = job.monitor( interval=15, # Check every 15 seconds timeout_minutes=240 # Timeout after 4 hours ) -# Interpret results ds.jobs.interpret_status(final_status, job.uuid) ``` ### Manual Status Checking ```python -# Check current status current_status = job.get_status() print(f"Current status: {current_status}") -# Check if job is complete if current_status in job.TERMINAL_STATES: print("Job has finished") else: print("Job is still running") -# Get detailed job information details = job.details print(f"Submitted: {details.created}") print(f"Started: {details.started}") print(f"Last Updated: {details.lastUpdated}") ``` -### Job Status Overview +### Job Statuses | Status | Description | |--------|-------------| -| `PENDING` | Job submitted but not yet processed | +| `PENDING` | Submitted, not yet processed | | `PROCESSING_INPUTS` | Input files being staged | -| `STAGING_INPUTS` | Files being transferred to compute system | +| `STAGING_INPUTS` | Files transferring to compute system | | `STAGING_JOB` | Job being prepared for execution | -| `SUBMITTING_JOB` | Job being submitted to scheduler | -| `QUEUED` | Job waiting in scheduler queue | -| `RUNNING` | Job actively executing | +| `SUBMITTING_JOB` | Submitting to scheduler | +| `QUEUED` | Waiting in scheduler queue | +| `RUNNING` | Executing | | `ARCHIVING` | Output files being archived | -| `FINISHED` | Job completed successfully | -| `FAILED` | Job failed during execution | -| `CANCELLED` | Job was cancelled | -| `STOPPED` | Job was stopped | +| `FINISHED` | Completed successfully | +| `FAILED` | Failed | +| `CANCELLED` | Cancelled | +| `STOPPED` | 
Stopped | ## Job Analysis ### Runtime Summary ```python -# Get runtime breakdown job.print_runtime_summary(verbose=False) -# Detailed history (verbose mode) +# Detailed history job.print_runtime_summary(verbose=True) ``` @@ -283,168 +249,79 @@ TOTAL time: 01:29:15 ### Status Messages ```python -# Get last status message last_message = job.last_message if last_message: print(f"Last message: {last_message}") -else: - print("No status message available") ``` ## Output Management -### Listing Job Outputs +### Listing Outputs ```python -# List all files in job archive outputs = job.list_outputs() for output in outputs: print(f"- {output.name} ({output.type}, {output.size} bytes)") -# List files in subdirectory +# Subdirectory results = job.list_outputs(path="results/") ``` -### Accessing Job Archive - -```python -# Get archive URI -archive_uri = job.archive_uri -print(f"Job archive: {archive_uri}") - -# Use files interface to browse archive -files = ds.files.list(archive_uri) -for file in files: - print(f"- {file.name}") -``` - ### Reading Output Files ```python -# Read job output log stdout = job.get_output_content("tapisjob.out") if stdout: - print("Job Output:") print(stdout) -# Read last 50 lines of output +# Last 50 lines recent_output = job.get_output_content("tapisjob.out", max_lines=50) -# Read error log (if job failed) +# Error log stderr = job.get_output_content("tapisjob.err", missing_ok=True) if stderr: - print("Error Output:") print(stderr) - -# Read custom output files -results = job.get_output_content("results.txt", missing_ok=True) ``` ### Downloading Files ```python -# Download specific files job.download_output("results.mat", "/local/path/results.mat") -job.download_output("output_data.csv", "/local/analysis/data.csv") -# Download using files interface ds.files.download( f"{archive_uri}/results.mat", "/local/path/results.mat" ) ``` -## Job Management - -### Job Cancellation +## Job Cancellation ```python -# Cancel a running job job.cancel() - -# Check 
status after cancellation status = job.get_status() print(f"Status after cancel: {status}") ``` -The `cancel()` method sends a cancellation request to Tapis. Note that: -- Cancellation may not be immediate and depends on the job's current state -- Jobs already in terminal states (FINISHED, FAILED, etc.) cannot be cancelled -- The job status will eventually change to "CANCELLED" if the cancellation is successful +Cancellation may not be immediate. Jobs in terminal states (FINISHED, FAILED, etc.) cannot be cancelled. -### Resuming Monitoring +## Resuming Monitoring ```python -# If you lose connection, resume monitoring with job UUID from dapi import SubmittedJob job_uuid = "12345678-1234-1234-1234-123456789abc" resumed_job = SubmittedJob(ds._tapis, job_uuid) - -# Continue monitoring final_status = resumed_job.monitor() ``` -### Bulk Job Operations - -```python -# Monitor multiple jobs -job_uuids = ["uuid1", "uuid2", "uuid3"] -jobs = [SubmittedJob(ds._tapis, uuid) for uuid in job_uuids] - -# Check all statuses -for job in jobs: - status = job.get_status() - print(f"Job {job.uuid}: {status}") - -# Wait for all to complete -for job in jobs: - if job.get_status() not in job.TERMINAL_STATES: - print(f"Monitoring {job.uuid}...") - final_status = job.monitor() - print(f"Final status: {final_status}") -``` - -## System Information - -### Queue Information - -```python -# List available queues for a system -frontera_queues = ds.systems.queues("frontera") -for queue in frontera_queues: - print(f"Queue: {queue.name}") - print(f"Max runtime: {queue.maxRequestedTime} minutes") - print(f"Max nodes: {queue.maxNodesPerJob}") - -# Check if specific queue exists -dev_queue_exists = any(q.name == "development" for q in frontera_queues) -print(f"Development queue available: {dev_queue_exists}") -``` - -### System Status - -```python -# Get system information -try: - queues = ds.systems.queues("stampede3") - print(f"Stampede3 has {len(queues)} available queues") -except Exception as e: - 
print(f"Cannot access Stampede3: {e}") -``` - +(pylauncher)= ## Parameter Sweeps with PyLauncher -[PyLauncher](https://github.com/TACC/pylauncher) runs many independent tasks within a single SLURM allocation — ideal for parameter studies on DesignSafe. dapi provides built-in support for generating sweep commands, task lists, and launcher scripts. - -### Quick Example +[PyLauncher](https://github.com/TACC/pylauncher) runs many independent tasks within a single SLURM allocation -- ideal for parameter studies. dapi generates sweep commands, task lists, and launcher scripts. ```python -from dapi import DSClient - ds = DSClient() -# Define parameter sweep sweep = { "ALPHA": [0.3, 0.5, 3.7], "BETA": [1.1, 2.0, 3.0], @@ -466,7 +343,7 @@ ds.jobs.parametric_sweep.generate( debug="host+job", ) -# Submit the job +# Submit job = ds.jobs.parametric_sweep.submit( "/MyData/sweep_demo/", app_id="designsafe-agnostic-app", @@ -478,23 +355,29 @@ job = ds.jobs.parametric_sweep.submit( job.monitor() ``` -For a full walkthrough with OpenSees, see the **[PyLauncher example](examples/pylauncher.md)**. +For a full walkthrough with OpenSees, see the [PyLauncher example](examples/pylauncher.md). 
-## Advanced Patterns - -### Parametric Studies +## Bulk Operations ```python -# Submit multiple jobs with different parameters -base_request = ds.jobs.generate( - app_id="mpm-s3", - input_dir_uri=input_uri, - script_filename="template.json", - max_minutes=60, - allocation="your_allocation" -) +job_uuids = ["uuid1", "uuid2", "uuid3"] +jobs = [SubmittedJob(ds._tapis, uuid) for uuid in job_uuids] + +for job in jobs: + status = job.get_status() + print(f"Job {job.uuid}: {status}") + +for job in jobs: + if job.get_status() not in job.TERMINAL_STATES: + final_status = job.monitor() + print(f"Final status: {final_status}") +``` -# Parameter variations +## Multiple Separate Jobs + +If each run needs its own full allocation (e.g., MPI jobs that can't share nodes), submit them as separate Tapis jobs: + +```python parameters = [ {"friction": 0.1, "density": 2000}, {"friction": 0.2, "density": 2200}, @@ -503,179 +386,110 @@ parameters = [ submitted_jobs = [] for i, params in enumerate(parameters): - # Modify job request for this parameter set - job_req = base_request.copy() - job_req["name"] = f"parametric_study_{i:03d}" - job_req["description"] = f"Friction: {params['friction']}, Density: {params['density']}" - - # Add parameters as environment variables - if "parameterSet" not in job_req: - job_req["parameterSet"] = {} - if "envVariables" not in job_req["parameterSet"]: - job_req["parameterSet"]["envVariables"] = [] - - job_req["parameterSet"]["envVariables"].extend([ + job_req = ds.jobs.generate( + app_id="mpm-s3", + input_dir_uri=input_uri, + script_filename="template.json", + max_minutes=60, + allocation="your_allocation", + extra_env_vars=[ {"key": "FRICTION", "value": str(params["friction"])}, - {"key": "DENSITY", "value": str(params["density"])} - ]) - - # Submit job + {"key": "DENSITY", "value": str(params["density"])}, + ], + ) + job_req["name"] = f"parametric_study_{i:03d}" job = ds.jobs.submit(job_req) submitted_jobs.append(job) - print(f"Submitted job 
{i+1}/{len(parameters)}: {job.uuid}") -# Monitor all jobs -print("Monitoring all jobs...") -for i, job in enumerate(submitted_jobs): - print(f"\nMonitoring job {i+1}/{len(submitted_jobs)}: {job.uuid}") - final_status = job.monitor() - print(f"Job {i+1} final status: {final_status}") +for job in submitted_jobs: + job.monitor() ``` -### Job Dependencies +For independent serial tasks, [PyLauncher](#pylauncher) is more efficient -- it runs all tasks in a single allocation. + +## Job Dependencies ```python -# Submit jobs with dependencies (manual coordination) # Job 1: Preprocessing prep_job = ds.jobs.submit(preprocessing_request) prep_status = prep_job.monitor() if prep_status == "FINISHED": - print("Preprocessing complete, starting main analysis...") - # Job 2: Main analysis (uses outputs from Job 1) main_request["fileInputs"].append({ "name": "Preprocessed Data", "sourceUrl": prep_job.archive_uri, "targetPath": "preprocessed" }) - + main_job = ds.jobs.submit(main_request) main_status = main_job.monitor() - + if main_status == "FINISHED": - print("Main analysis complete, starting postprocessing...") - # Job 3: Postprocessing post_request["fileInputs"].append({ "name": "Analysis Results", "sourceUrl": main_job.archive_uri, "targetPath": "results" }) - + post_job = ds.jobs.submit(post_request) final_status = post_job.monitor() - - print(f"Pipeline complete. 
Final status: {final_status}") ``` ## Error Handling -### Common Issues and Solutions - ```python from dapi import JobSubmissionError, JobMonitorError try: - # Job submission job = ds.jobs.submit(job_request) final_status = job.monitor() except JobSubmissionError as e: print(f"Job submission failed: {e}") - - # Check common issues + if "allocation" in str(e).lower(): print("Check your TACC allocation is correct and active") elif "queue" in str(e).lower(): print("Check the queue name is valid for the system") elif "file" in str(e).lower(): print("Check input files exist and paths are correct") - + except JobMonitorError as e: print(f"Job monitoring failed: {e}") - - # Try to get last known status try: status = job.get_status() print(f"Last known status: {status}") except: print("Cannot determine job status") - -except Exception as e: - print(f"Unexpected error: {e}") ``` ### Debugging Failed Jobs ```python -# For failed jobs, get detailed error information if final_status == "FAILED": - print("Debugging failed job...") - - # Get error logs stderr = job.get_output_content("tapisjob.err", missing_ok=True) if stderr: print("Standard Error:") print(stderr) - - # Get last part of stdout + stdout = job.get_output_content("tapisjob.out", max_lines=100) if stdout: print("Last 100 lines of output:") print(stdout) - - # Check job details + details = job.details print(f"Last message: {details.lastMessage}") - print(f"Status history available via: job.print_runtime_summary(verbose=True)") + print(f"Full history: job.print_runtime_summary(verbose=True)") ``` -## Best Practices +## System Queues -### 1. 
Resource Planning ```python -# Choose appropriate resources -job_request = ds.jobs.generate( - app_id="mpm-s3", - input_dir_uri=input_uri, - script_filename="analysis.json", - max_minutes=60, # Realistic time estimate - node_count=1, # Start small, scale up - cores_per_node=24, # Match application parallelism - queue="development", # Use dev queue for testing - allocation="your_allocation" -) -``` - -### 2. Job Organization -```python -# Use descriptive names and metadata -job_request["name"] = f"seismic_analysis_{site_id}_{datetime.now().strftime('%Y%m%d_%H%M')}" -job_request["description"] = f"Seismic analysis for site {site_id} using {method} method" -job_request["tags"] = ["research", "seismic", site_id, method] -``` - -### 3. Error Recovery -```python -# Implement retry logic for transient failures -max_retries = 3 -for attempt in range(max_retries): - try: - job = ds.jobs.submit(job_request) - final_status = job.monitor() - break - except JobSubmissionError as e: - if attempt < max_retries - 1: - print(f"Attempt {attempt + 1} failed, retrying... ({e})") - time.sleep(60) # Wait before retry - else: - raise +frontera_queues = ds.systems.queues("frontera") +for queue in frontera_queues: + print(f"Queue: {queue.name}") + print(f"Max runtime: {queue.maxRequestedTime} minutes") + print(f"Max nodes: {queue.maxNodesPerJob}") ``` - -## Next Steps - -- **[Learn database access](database.md)** for research data integration -- **[Explore complete examples](examples/mpm.md)** showing real workflows -- **Check API reference** for detailed method documentation -- **Review file operations** for data management \ No newline at end of file diff --git a/docs/quickstart.md b/docs/quickstart.md index b2dfb3d..e732276 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -1,359 +1,91 @@ # Quick Start -Get up and running with dapi in just a few minutes! This guide will walk you through your first job submission and database query. - -## Prerequisites - -1. 
**Install dapi**: `pip install dapi` (see [Installation Guide](installation.md)) -2. **DesignSafe Account**: [Sign up here](https://www.designsafe-ci.org/account/register/) if needed -3. **Authentication**: Set up credentials (see [Authentication Guide](authentication.md)) - -## 5-Minute Example - -Here's a complete example that demonstrates the core dapi functionality: - ```python -from dapi import DSClient - -# 1. Initialize client (handles authentication) -ds = DSClient() - -# 2. Find available applications -matlab_apps = ds.apps.find("matlab", verbose=True) - -# 3. Submit a simple job -job_request = ds.jobs.generate( - app_id="matlab-r2023a", - input_dir_uri="/MyData/analysis/input/", - script_filename="run_analysis.m", - max_minutes=30, - allocation="your_allocation" -) - -# 4. Submit and monitor -job = ds.jobs.submit(job_request) -final_status = job.monitor() - -# 5. Check results -if final_status == "FINISHED": - print("Job completed successfully!") - job.print_runtime_summary() - - # Get job outputs - outputs = job.list_outputs() - for output in outputs: - print(f"- {output.name} ({output.type})") - -# 6. Query research database -df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") -print(df) +pip install dapi ``` -## Step-by-Step Walkthrough - -### Step 1: Initialize the Client - ```python from dapi import DSClient -# This will prompt for credentials if not found in environment ds = DSClient() -# Output: Authentication successful. -``` -`DSClient()` automatically sets up TMS credentials on TACC execution systems (Frontera, Stampede3, LS6). You'll see a summary like: - -``` -TMS credentials ready: frontera, stampede3, ls6 -``` +# Translate path and submit a job +input_uri = ds.files.to_uri("/MyData/analysis/input/") -Systems where you don't have an allocation are silently skipped. See the [Authentication Guide](authentication.md#tms-credentials-execution-system-access) for manual control. 
- -### Step 2: Explore Available Applications - -```python -# Find all applications -all_apps = ds.apps.find("", verbose=False) -print(f"Found {len(all_apps)} total applications") - -# Find specific applications -mpm_apps = ds.apps.find("mpm", verbose=True) -matlab_apps = ds.apps.find("matlab", verbose=True) -opensees_apps = ds.apps.find("opensees", verbose=True) - -# Get detailed information about an app -app_details = ds.apps.get_details("mpm-s3", verbose=True) -``` - -### Step 3: Prepare Your Input Files - -```python -# Translate DesignSafe paths to TAPIS URIs -input_path = "/MyData/mpm-benchmarks/2d/uniaxial_stress/" -input_uri = ds.files.to_uri(input_path, verify_exists=True) -print(f"Input URI: {input_uri}") - -# List files in the directory -files = ds.files.list(input_uri) -for file in files: - print(f"- {file.name} ({file.type}, {file.size} bytes)") -``` - -### Step 4: Generate Job Request - -```python -# Generate a job request with automatic parameter mapping job_request = ds.jobs.generate( - app_id="mpm-s3", - input_dir_uri=input_uri, - script_filename="mpm.json", - max_minutes=10, - node_count=1, - cores_per_node=1, - allocation="your_tacc_allocation" + app_id="matlab-r2023a", + input_dir_uri=input_uri, + script_filename="run_analysis.m", + max_minutes=30, + allocation="your_allocation", ) -# Optionally modify the request -job_request["description"] = "My MPM analysis" -job_request["tags"] = ["research", "mpm"] -``` - -### Step 5: Submit and Monitor Job - -```python -# Submit the job job = ds.jobs.submit(job_request) -print(f"Job submitted: {job.uuid}") - -# Monitor with real-time progress -final_status = job.monitor(interval=15) - -# Interpret the result -ds.jobs.interpret_status(final_status, job.uuid) +job.monitor() ``` -### Step 6: Access Job Results +That's it. `DSClient()` handles authentication and TMS credentials automatically. 
-```python -# Print runtime summary -if final_status in job.TERMINAL_STATES: - job.print_runtime_summary(verbose=False) - - # Get archive URI - archive_uri = job.archive_uri - print(f"Results at: {archive_uri}") - - # List output files - outputs = job.list_outputs() - for output in outputs: - print(f"- {output.name}") - - # Read job output - stdout = job.get_output_content("tapisjob.out", max_lines=50) - if stdout: - print("Job Output:") - print(stdout) -``` +## More examples -### Step 7: Query Research Databases +### Find apps ```python -# Query NGL database -ngl_data = ds.db.ngl.read_sql(""" - SELECT SITE_NAME, SITE_LAT, SITE_LON - FROM SITE - WHERE SITE_LAT > 35 - LIMIT 10 -""") -print("NGL Sites:") -print(ngl_data) - -# Query with parameters -site_name = "Amagasaki" -site_data = ds.db.ngl.read_sql( - "SELECT * FROM SITE WHERE SITE_NAME = %s", - params=[site_name] -) -print(f"Data for {site_name}:") -print(site_data) -``` - -## Common Workflows - -### Workflow 1: MATLAB Analysis - -```python -# Submit MATLAB job -job_request = ds.jobs.generate( - app_id="matlab-r2023a", - input_dir_uri="/MyData/matlab/analysis/", - script_filename="main.m", - max_minutes=60, - allocation="your_allocation" -) - -job = ds.jobs.submit(job_request) -final_status = job.monitor() - -if final_status == "FINISHED": - # Download specific result file - job.download_output("results.mat", "/local/path/results.mat") +ds.apps.find("matlab") +ds.apps.find("opensees") ``` -### Workflow 2: OpenSees Simulation +### List files ```python -# Submit OpenSees job -job_request = ds.jobs.generate( - app_id="opensees-express", - input_dir_uri="/MyData/opensees/earthquake/", - script_filename="earthquake_analysis.tcl", - max_minutes=120, - allocation="your_allocation" -) - -job = ds.jobs.submit(job_request) -final_status = job.monitor() +files = ds.files.list(input_uri) +for f in files: + print(f.name) ``` -### Workflow 3: Database Analysis +### Check job results ```python -# Complex database query with 
joins -query = """ -SELECT s.SITE_NAME, s.SITE_LAT, s.SITE_LON, COUNT(r.RECORD_ID) as num_records -FROM SITE s -LEFT JOIN RECORD r ON s.SITE_ID = r.SITE_ID -WHERE s.SITE_LAT BETWEEN 32 AND 38 -GROUP BY s.SITE_ID -HAVING num_records > 5 -ORDER BY num_records DESC -LIMIT 20 -""" +job.print_runtime_summary() -df = ds.db.ngl.read_sql(query) -print("Sites with most records in California:") -print(df) +outputs = job.list_outputs() +for output in outputs: + print(output.name) -# Export to CSV -df.to_csv("california_sites.csv", index=False) +stdout = job.get_output_content("tapisjob.out") +print(stdout) ``` -## Configuration Tips - -### Set Default Allocation +### Query research databases ```python -import os -os.environ['DEFAULT_ALLOCATION'] = 'your_tacc_allocation' - -# Now you can omit allocation in job requests -job_request = ds.jobs.generate( - app_id="mpm-s3", - input_dir_uri=input_uri, - script_filename="mpm.json" - # allocation will use DEFAULT_ALLOCATION -) +df = ds.db.ngl.read_sql("SELECT * FROM SITE LIMIT 5") +print(df) ``` -### Customize Job Monitoring +### PyLauncher parameter sweep ```python -# Monitor with custom interval and timeout -final_status = job.monitor( - interval=30, # Check every 30 seconds - timeout_minutes=240 # Timeout after 4 hours -) - -# Handle different outcomes -if final_status == "FINISHED": - print("Success!") -elif final_status == "FAILED": - print("Job failed") - # Get error details - stderr = job.get_output_content("tapisjob.err") - if stderr: - print("Error details:", stderr) -elif final_status == "TIMEOUT": - print("Monitoring timed out") -``` - -## Error Handling +sweep = {"ALPHA": [0.3, 0.5, 3.7], "BETA": [1.1, 2.0, 3.0]} -```python -from dapi import ( - AuthenticationError, - CredentialError, - JobSubmissionError, - FileOperationError, - JobMonitorError +ds.jobs.parametric_sweep.generate( + "python3 simulate.py --alpha ALPHA --beta BETA --output out_ALPHA_BETA", + sweep, + "/home/jupyter/MyData/sweep_demo/", ) -try: - ds = 
DSClient() - - # Try to submit job - job_request = ds.jobs.generate(...) - job = ds.jobs.submit(job_request) - final_status = job.monitor() - -except AuthenticationError as e: - print(f"Authentication failed: {e}") -except CredentialError as e: - print(f"TMS credential error: {e}") -except JobSubmissionError as e: - print(f"Job submission failed: {e}") -except FileOperationError as e: - print(f"File operation failed: {e}") -except JobMonitorError as e: - print(f"Job monitoring failed: {e}") -except Exception as e: - print(f"Unexpected error: {e}") -``` - -## Best Practices - -### 1. Always Verify Paths -```python -# Good - verify path exists -input_uri = ds.files.to_uri( - "/MyData/analysis/", - verify_exists=True +job = ds.jobs.parametric_sweep.submit( + "/MyData/sweep_demo/", + app_id="designsafe-agnostic-app", + allocation="your_allocation", + node_count=1, + cores_per_node=48, ) - -# Risk - path might not exist -input_uri = ds.files.to_uri("/MyData/analysis/") -``` - -### 2. Use Descriptive Job Names -```python -# Good - descriptive name -job_request["name"] = "earthquake_analysis_2024_site_A" -job_request["description"] = "Nonlinear seismic analysis for Site A" -job_request["tags"] = ["earthquake", "site-A", "research"] +job.monitor() ``` -### 3. Handle Long-Running Jobs -```python -# For long jobs, save job UUID for later monitoring -job = ds.jobs.submit(job_request) -job_uuid = job.uuid - -# Save UUID to file or environment -with open("current_job.txt", "w") as f: - f.write(job_uuid) - -# Later, resume monitoring -from dapi import SubmittedJob -saved_job = SubmittedJob(ds._tapis, job_uuid) -final_status = saved_job.monitor() -``` - -## Next Steps - -Now that you've completed the quick start: +## Next steps -1. **[Explore detailed job management](jobs.md)** for advanced job operations -2. **[Learn database querying](database.md)** for research data analysis -3. **[Study complete examples](examples/mpm.md)** for real-world workflows -4. 
**Check the API reference** for all available methods \ No newline at end of file +- [Jobs](jobs.md) — advanced job configuration, error handling, monitoring +- [Database](database.md) — research data queries +- [Examples](examples.md) — full notebook workflows diff --git a/examples/apps.ipynb b/examples/apps.ipynb index 2632a38..8da6157 100644 --- a/examples/apps.ipynb +++ b/examples/apps.ipynb @@ -22,7 +22,9 @@ "id": "839fa332-70a6-4818-a190-18c9ca109c28", "metadata": {}, "outputs": [], - "source": "%pip install dapi --quiet" + "source": [ + "%pip install dapi --quiet" + ] }, { "cell_type": "code", @@ -40,8 +42,6 @@ } ], "source": [ - "import os\n", - "\n", "# Import only DSClient and exceptions needed at top level\n", "from dapi import (\n", " DSClient,\n", diff --git a/examples/db.ipynb b/examples/db.ipynb index 90ce5b4..5114a57 100644 --- a/examples/db.ipynb +++ b/examples/db.ipynb @@ -14,7 +14,9 @@ "id": "5a76c77b-0078-48fc-ade6-e46cbac010dd", "metadata": {}, "outputs": [], - "source": "%pip install dapi --quiet" + "source": [ + "%pip install dapi --quiet" + ] }, { "cell_type": "code", @@ -24,7 +26,6 @@ "outputs": [], "source": [ "# Cell: Imports\n", - "import os\n", "from dapi import DSClient # Import only the main client\n", "\n", "# Import exceptions if needed for specific handling later" diff --git a/examples/mpm/mpm.ipynb b/examples/mpm/mpm.ipynb index 1d87046..84ff4b1 100644 --- a/examples/mpm/mpm.ipynb +++ b/examples/mpm/mpm.ipynb @@ -22,7 +22,9 @@ "id": "dabd7715", "metadata": {}, "outputs": [], - "source": "%pip install dapi --quiet" + "source": [ + "%pip install dapi --quiet" + ] }, { "cell_type": "code", @@ -40,8 +42,6 @@ } ], "source": [ - "import os\n", - "\n", "# Import only DSClient and exceptions needed at top level\n", "from dapi import (\n", " DSClient,\n", diff --git a/examples/openfoam/openfoam-minimal.ipynb b/examples/openfoam/openfoam-minimal.ipynb index 0abc25c..ea25a98 100644 --- a/examples/openfoam/openfoam-minimal.ipynb +++ 
b/examples/openfoam/openfoam-minimal.ipynb @@ -19,7 +19,9 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": "%pip install dapi --quiet" + "source": [ + "%pip install dapi --quiet" + ] }, { "cell_type": "code", @@ -419,8 +421,7 @@ "metadata": {}, "outputs": [], "source": [ - "import matplotlib.pyplot as plt\n", - "import os" + "import matplotlib.pyplot as plt" ] }, { diff --git a/myst.yml b/myst.yml index 37ec574..e358827 100644 --- a/myst.yml +++ b/myst.yml @@ -1,13 +1,6 @@ version: 1 project: title: DAPI - DesignSafe API - authors: - - name: Krishna Kumar - email: krishnak@utexas.edu - - name: Pedro Arduino - email: parduino@uw.edu - - name: Scott Brandenberg - email: sjbrandenberg@ucla.edu copyright: "2024" github: https://github.com/DesignSafe-CI/dapi license: MIT @@ -32,18 +25,8 @@ project: - file: docs/examples/pylauncher.md - file: docs/examples/tms_credentials.md - file: docs/examples/database.md - - title: API Reference - children: - - file: docs/api/index.md - - file: docs/api/client.md - - file: docs/api/jobs.md - - file: docs/api/launcher.md - - file: docs/api/files.md - - file: docs/api/apps.md - - file: docs/api/systems.md - - file: docs/api/database.md - - file: docs/api/auth.md - - file: docs/api/exceptions.md + - url: /api/ + title: API Reference exclude: - _build/** - out/** From cbc3910337738a5f92b505aa8bfa1b60dc64b829 Mon Sep 17 00:00:00 2001 From: Krishna Kumar Date: Sun, 22 Mar 2026 14:43:15 -0500 Subject: [PATCH 21/21] Documentation --- docs/apps.md | 79 +++++++++++++++++++++++++++++++++++++++++++ docs/files.md | 89 +++++++++++++++++++++++++++++++++++++++++++++++++ docs/jobs.md | 4 +-- docs/systems.md | 42 +++++++++++++++++++++++ myst.yml | 3 ++ 5 files changed, 215 insertions(+), 2 deletions(-) create mode 100644 docs/apps.md create mode 100644 docs/files.md create mode 100644 docs/systems.md diff --git a/docs/apps.md b/docs/apps.md new file mode 100644 index 0000000..5e00609 --- /dev/null +++ b/docs/apps.md @@ -0,0 +1,79 
@@ +# Apps + +## Find apps + +Search by name. Use an empty string to list everything. + +```python +from dapi import DSClient +ds = DSClient() + +ds.apps.find("matlab") +# Found 3 matching apps: +# - matlab-r2023a (Version: 1.0, Owner: designsafe) +# - matlab-parallel (Version: 2.1, Owner: tacc) +# - matlab-desktop (Version: 1.5, Owner: designsafe) + +ds.apps.find("opensees") +ds.apps.find("mpm") + +# All apps (quiet mode) +all_apps = ds.apps.find("", verbose=False) +len(all_apps) +``` + +The search uses partial matching — `"matlab"` matches any app with "matlab" in the ID. + +You can filter by ownership: + +```python +# Only apps you own +ds.apps.find("", list_type="OWNED") + +# Only shared/public apps +ds.apps.find("", list_type="SHARED_PUBLIC") +``` + +Valid `list_type` values: `"ALL"` (default), `"OWNED"`, `"SHARED_PUBLIC"`, `"SHARED_DIRECT"`, `"READ_PERM"`, `"MINE"`. + +## App details + +```python +app = ds.apps.get_details("mpm-s3") +# App Details: +# ID: mpm-s3 +# Version: 0.1.0 +# Owner: designsafe +# Execution System: frontera +# Description: ... +``` + +Access job configuration: + +```python +attrs = app.jobAttributes +print(attrs.execSystemId) # frontera +print(attrs.maxMinutes) # 2880 +print(attrs.coresPerNode) # 56 +print(attrs.execSystemLogicalQueue) # normal +``` + +Request a specific version: + +```python +app = ds.apps.get_details("mpm-s3", app_version="0.1.0") +``` + +Returns `None` if the app doesn't exist (instead of raising). 
+ +## Common apps + +| App ID | Description | +|---|---| +| `designsafe-agnostic-app` | General-purpose Python, OpenSees, PyLauncher | +| `matlab-r2023a` | MATLAB | +| `opensees-express` | OpenSees (serial) | +| `opensees-mp-s3` | OpenSees (MPI parallel) | +| `mpm-s3` | Material Point Method | +| `adcirc-v55` | ADCIRC coastal modeling | +| `ls-dyna` | LS-DYNA finite element | diff --git a/docs/files.md b/docs/files.md new file mode 100644 index 0000000..0d77be7 --- /dev/null +++ b/docs/files.md @@ -0,0 +1,89 @@ +# Files + +## Path translation + +DesignSafe uses Tapis URIs internally (`tapis://system-id/path`). Most users work with familiar paths like `/MyData/folder/` — dapi translates between the two. + +```python +from dapi import DSClient +ds = DSClient() + +# MyData → includes your username automatically +ds.files.to_uri("/MyData/analysis/input/") +# tapis://designsafe.storage.default/<username>/analysis/input/ + +# Community data +ds.files.to_uri("/CommunityData/some-dataset/") +# tapis://designsafe.storage.community/some-dataset/ + +# Projects — looks up the Tapis system ID from the project number +ds.files.to_uri("/projects/PRJ-1234/data/") +# tapis://project-xxxx-xxxx-xxxx/data/ + +# Already a Tapis URI — passed through unchanged +ds.files.to_uri("tapis://designsafe.storage.default/<username>/folder/") +``` + +Verify a path exists before using it: + +```python +ds.files.to_uri("/MyData/analysis/input/", verify_exists=True) +``` + +Reverse translation (URI back to Jupyter path): + +```python +ds.files.to_path("tapis://designsafe.storage.default/<username>/data/file.txt") +# /home/jupyter/MyData/data/file.txt + +ds.files.to_path("tapis://designsafe.storage.community/datasets/eq.csv") +# /home/jupyter/CommunityData/datasets/eq.csv +``` + +### Supported path formats + +| Input path | Tapis system | +|---|---| +| `/MyData/...` | `designsafe.storage.default/<username>/...` | +| `/home/jupyter/MyData/...` | `designsafe.storage.default/<username>/...` | +| `jupyter/MyData/...` | `designsafe.storage.default/<username>/...` |
+| `/CommunityData/...` | `designsafe.storage.community/...` | +| `/projects/PRJ-XXXX/...` | `project-<uuid>/...` (auto-discovered) | +| `tapis://...` | passed through unchanged | + +## List files + +```python +files = ds.files.list("tapis://designsafe.storage.default/<username>/analysis/") +for f in files: + print(f"{f.name} ({f.type}, {f.size} bytes)") +``` + +Pagination: + +```python +# Get items 100-199 +files = ds.files.list(uri, limit=100, offset=100) +``` + +## Upload + +```python +ds.files.upload( + "/local/path/input.json", + "tapis://designsafe.storage.default/<username>/analysis/input.json", +) +``` + +The local file must exist and be a regular file (not a directory). Parent directories are created on the remote system. + +## Download + +```python +ds.files.download( + "tapis://designsafe.storage.default/<username>/results/output.csv", + "/local/path/output.csv", +) +``` + +Local parent directories are created automatically. The local path must be a file path, not a directory. diff --git a/docs/jobs.md b/docs/jobs.md index 4e24582..e7e7caf 100644 --- a/docs/jobs.md +++ b/docs/jobs.md @@ -490,6 +490,6 @@ if final_status == "FAILED": frontera_queues = ds.systems.queues("frontera") for queue in frontera_queues: print(f"Queue: {queue.name}") - print(f"Max runtime: {queue.maxRequestedTime} minutes") - print(f"Max nodes: {queue.maxNodesPerJob}") + print(f"Max runtime: {queue.maxMinutes} min") + print(f"Max nodes: {queue.maxNodeCount}") ``` diff --git a/docs/systems.md b/docs/systems.md new file mode 100644 index 0000000..e55a633 --- /dev/null +++ b/docs/systems.md @@ -0,0 +1,42 @@ +# Systems + +## Queues + +List available batch queues on a TACC execution system. + +```python +from dapi import DSClient + +ds = DSClient() + +queues = ds.systems.queues("frontera") +for q in queues: + print(f"{q.name}: max {q.maxNodeCount} nodes, {q.maxMinutes} min") +``` + +## TMS credentials + +dapi needs SSH credentials on TACC systems to submit jobs. `DSClient()` sets these up automatically on init.
To manage them manually: + +```python +# Check +ds.systems.check_credentials("frontera") +# → True / False + +# Establish (idempotent — skips if already set) +ds.systems.establish_credentials("frontera") + +# Force re-create +ds.systems.establish_credentials("frontera", force=True) + +# Revoke +ds.systems.revoke_credentials("frontera") +``` + +## TACC systems + +| System | Typical use | +|---|---| +| `frontera` | Large-scale compute | +| `stampede3` | General-purpose compute | +| `ls6` | Lone Star 6 | diff --git a/myst.yml b/myst.yml index e358827..200e7b1 100644 --- a/myst.yml +++ b/myst.yml @@ -13,7 +13,10 @@ project: - file: docs/quickstart.md - title: User Guide children: + - file: docs/apps.md + - file: docs/files.md - file: docs/jobs.md + - file: docs/systems.md - file: docs/database.md - title: Examples children: