From de8cc39eba709c182932cd0b4c29ad65a4ff2891 Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Mon, 23 Feb 2026 18:28:20 +0530 Subject: [PATCH 1/8] feat: add Multiple Custom Domain Support --- .gitignore | 1 - README.md | 47 +- docs/Caching.md | 101 +++ docs/MultipleCustomDomain.md | 286 ++++++++ poetry.lock | 448 +++++++++--- src/auth0_api_python/__init__.py | 17 +- src/auth0_api_python/api_client.py | 320 +++++++- src/auth0_api_python/cache.py | 164 +++++ src/auth0_api_python/config.py | 26 +- src/auth0_api_python/errors.py | 20 + src/auth0_api_python/types.py | 48 ++ src/auth0_api_python/utils.py | 132 +++- tests/test_api_client.py | 1085 ++++++++++++++++++++++++++-- tests/test_cache.py | 163 +++++ tests/test_utils.py | 104 +++ 15 files changed, 2755 insertions(+), 207 deletions(-) create mode 100644 docs/Caching.md create mode 100644 docs/MultipleCustomDomain.md create mode 100644 src/auth0_api_python/cache.py create mode 100644 src/auth0_api_python/types.py create mode 100644 tests/test_cache.py create mode 100644 tests/test_utils.py diff --git a/.gitignore b/.gitignore index 7a66604..d6eb424 100644 --- a/.gitignore +++ b/.gitignore @@ -15,7 +15,6 @@ env/ #Build files dist -docs #testfile setup.py diff --git a/README.md b/README.md index 9496359..9c9c254 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,8 @@ This SDK provides comprehensive support for securing APIs with Auth0-issued acce ### **Core Features** - **Unified Entry Point**: `verify_request()` - automatically detects and validates Bearer or DPoP schemes -- **OIDC Discovery** - Automatic fetching of Auth0 metadata and JWKS +- **Multi-Custom Domain (MCD)** - Accept tokens from multiple Auth0 domains with static lists or dynamic resolvers +- **OIDC Discovery** - Automatic fetching of Auth0 metadata and JWKS with per-issuer caching - **JWT Validation** - Complete RS256 signature verification with claim validation - **DPoP Proof Verification** - Full RFC 9449 compliance with ES256 signature 
validation - **Flexible Configuration** - Support for both "Allowed" and "Required" DPoP modes @@ -279,6 +280,50 @@ api_client = ApiClient(ApiClientOptions( )) ``` +### 7. Multi-Custom Domain (MCD) Support + +If your Auth0 tenant has multiple custom domains, or you're migrating between domains, the SDK can accept tokens from any of them: + +#### Static Domain List + +```python +from auth0_api_python import ApiClient, ApiClientOptions + +api_client = ApiClient(ApiClientOptions( + domains=[ + "tenant.auth0.com", + "auth.example.com", + "auth.acme.org" + ], + audience="https://api.example.com" +)) + +# Tokens from any of the three domains are accepted +claims = await api_client.verify_access_token(access_token) +``` + +#### Dynamic Resolver + +For runtime domain resolution based on request context: + +```python +from auth0_api_python import ApiClient, ApiClientOptions, DomainsResolverContext + +def resolve_domains(context: DomainsResolverContext) -> list[str]: + # Determine allowed domains based on the request + return ["tenant.auth0.com", "auth.example.com"] + +api_client = ApiClient(ApiClientOptions( + domains=resolve_domains, + audience="https://api.example.com" +)) +``` + +For hybrid mode (migration scenarios), resolver patterns, error handling, and caching configuration, see the full guides: + +- **[Multi-Custom Domain Guide](docs/MultipleCustomDomain.md)** - Configuration modes, resolver patterns, migration, error handling +- **[Caching Guide](docs/Caching.md)** - Cache tuning, custom adapters (Redis, Memcached) + ## Feedback ### Contributing diff --git a/docs/Caching.md b/docs/Caching.md new file mode 100644 index 0000000..befda62 --- /dev/null +++ b/docs/Caching.md @@ -0,0 +1,101 @@ +# Caching + +The SDK caches OIDC discovery metadata and JWKS (JSON Web Key Sets) to avoid redundant network calls on every token verification. In MCD mode, each issuer domain gets its own cache entries. 
+ +## Default Behavior + +By default, the SDK uses an in-memory LRU cache with: + +- **TTL**: 600 seconds (10 minutes), or the server's `Cache-Control: max-age` value - whichever is lower +- **Max entries**: 100 per cache (discovery and JWKS caches are separate) +- **Eviction**: Least Recently Used (LRU) when max entries is reached + +No configuration is needed for the default cache. It works well for single-server deployments. + +## Configuration + +### TTL and Max Entries + +```python +from auth0_api_python import ApiClient, ApiClientOptions + +api_client = ApiClient(ApiClientOptions( + domains=["tenant.auth0.com", "auth.example.com"], + audience="https://api.example.com", + cache_ttl_seconds=300, # 5 minutes max TTL + cache_max_entries=50 # 50 entries per cache +)) +``` + +The effective TTL for each entry is `min(server_max_age, cache_ttl_seconds)`. Auth0 typically sends `Cache-Control: max-age=15` for discovery metadata, so the effective TTL will be 15 seconds even if you configure a higher value. 
+ +### Custom Cache Adapter + +For distributed deployments (multiple servers, containers), use a shared cache backend by implementing `CacheAdapter`: + +```python +import json +from typing import Any, Optional +from auth0_api_python import ApiClient, ApiClientOptions, CacheAdapter + +class RedisCache(CacheAdapter): + def __init__(self, redis_client): + self.redis = redis_client + + def get(self, key: str) -> Optional[Any]: + value = self.redis.get(key) + return json.loads(value) if value else None + + def set(self, key: str, value: Any, ttl_seconds: Optional[int] = None) -> None: + serialized = json.dumps(value) + if ttl_seconds: + self.redis.set(key, serialized, ex=ttl_seconds) + else: + self.redis.set(key, serialized) + + def delete(self, key: str) -> None: + self.redis.delete(key) + + def clear(self) -> None: + # Be careful: this clears the entire Redis database + self.redis.flushdb() + +# Usage +import redis +redis_client = redis.Redis(host="localhost", port=6379, db=0) + +api_client = ApiClient(ApiClientOptions( + domains=["tenant.auth0.com", "auth.example.com"], + audience="https://api.example.com", + cache_adapter=RedisCache(redis_client) +)) +``` + +When a custom adapter is provided, both the discovery cache and JWKS cache use it. Cache keys are inherently distinct — discovery keys are normalized issuer URLs (e.g., `https://tenant.auth0.com/`) and JWKS keys are `jwks_uri` values (e.g., `https://tenant.auth0.com/.well-known/jwks.json`). + +## Tuning Recommendations + +### TTL + +- **Development**: Use a short TTL (e.g., `cache_ttl_seconds=10`) to pick up configuration changes quickly +- **Production**: The default (600 seconds) is a reasonable upper bound. Auth0's `Cache-Control: max-age` headers will typically set a lower effective TTL + +### Max Entries + +Each issuer domain consumes **2 cache entries** (one for discovery metadata, one for JWKS). 
Size the cache based on the number of distinct issuers you expect: + +- **Static list with 3 domains**: `cache_max_entries=10` is more than enough +- **Dynamic resolver with many issuers**: Set to `(expected_issuers * 2) + buffer` + +When the cache is full, the least recently used entry is evicted. A cache miss triggers a network fetch on the next verification for that issuer. + +## CacheAdapter API + +| Method | Signature | Description | +|---|---|---| +| `get` | `(key: str) -> Optional[Any]` | Return cached value or `None` if not found / expired | +| `set` | `(key: str, value: Any, ttl_seconds: Optional[int]) -> None` | Store value with optional TTL | +| `delete` | `(key: str) -> None` | Remove a single entry | +| `clear` | `() -> None` | Remove all entries | + +All methods are synchronous. The `value` passed to `set` is a dictionary (parsed JSON from Auth0's OIDC and JWKS endpoints). diff --git a/docs/MultipleCustomDomain.md b/docs/MultipleCustomDomain.md new file mode 100644 index 0000000..96d0a46 --- /dev/null +++ b/docs/MultipleCustomDomain.md @@ -0,0 +1,286 @@ +# Multi-Custom Domain (MCD) + +Multi-Custom Domain support allows your API to accept tokens issued by multiple Auth0 domains. 
This is useful when: + +- Your Auth0 tenant has multiple custom domains configured +- You're migrating from one domain to another and need to accept tokens from both during the transition +- Your API serves requests from clients using different Auth0 domains + +## Configuration Modes + +### Static Domain List + +For APIs that accept tokens from a known set of domains: + +```python +from auth0_api_python import ApiClient, ApiClientOptions + +api_client = ApiClient(ApiClientOptions( + domains=[ + "tenant.auth0.com", + "auth.example.com", + "auth.acme.org" + ], + audience="https://api.example.com" +)) + +# Tokens from any of the three domains are accepted +claims = await api_client.verify_access_token(access_token) +``` + +The SDK validates the token's issuer against the configured list before performing OIDC discovery. Each domain gets its own cached discovery metadata and JWKS. + +### Dynamic Resolver + +For APIs that need to determine allowed domains at runtime (e.g., based on the request): + +```python +from auth0_api_python import ApiClient, ApiClientOptions, DomainsResolverContext + +def resolve_domains(context: DomainsResolverContext) -> list[str]: + # context contains: + # unverified_iss - the token's issuer claim (before verification) + # request_url - the URL the request was made to (if provided) + # request_headers - the request headers dict (if provided) + return ["tenant.auth0.com", "auth.example.com"] + +api_client = ApiClient(ApiClientOptions( + domains=resolve_domains, + audience="https://api.example.com" +)) + +claims = await api_client.verify_access_token(access_token) +``` + +The resolver is called on every token verification. It receives a `DomainsResolverContext` with the unverified issuer and (if available) the request URL and headers. It must return a non-empty list of allowed domain strings. 
+ +### Hybrid Mode (domain + domains) + +For migration scenarios where you need `domain` for client-initiated flows (token exchange, connection tokens) and `domains` for token verification: + +```python +api_client = ApiClient(ApiClientOptions( + domain="tenant.auth0.com", # Used for token exchange discovery + domains=[ # Used for token verification + "tenant.auth0.com", + "auth.newdomain.com" + ], + audience="https://api.example.com", + client_id="", + client_secret="" +)) + +# Token verification uses the domains list +claims = await api_client.verify_access_token(access_token) + +# Token exchange uses the domain parameter for discovery +result = await api_client.get_token_by_exchange_profile( + subject_token=access_token, + subject_token_type="urn:example:subject-token" +) +``` + +--- + +## Resolver Patterns + +### Host-Header Based + +Route allowed domains based on the incoming request's host: + +```python +def host_based_resolver(context: DomainsResolverContext) -> list[str]: + host = (context.get("request_headers") or {}).get("host", "") + + domain_map = { + "api.us.example.com": ["us-tenant.auth0.com", "auth.us.example.com"], + "api.eu.example.com": ["eu-tenant.auth0.com", "auth.eu.example.com"], + } + return domain_map.get(host, ["default-tenant.auth0.com"]) + +api_client = ApiClient(ApiClientOptions( + domains=host_based_resolver, + audience="https://api.example.com" +)) + +# Pass request context through verify_request +claims = await api_client.verify_request( + headers=request.headers, + http_url=str(request.url) +) +``` + +### Tenant Lookup + +Resolve domains from a database or configuration service: + +```python +def tenant_resolver(context: DomainsResolverContext) -> list[str]: + # Look up allowed domains from your tenant registry + # The unverified_iss tells you which issuer the token claims to be from + issuer = context["unverified_iss"] + + # Your lookup logic here (database, config file, etc.) 
+ allowed = get_domains_for_issuer(issuer) + return allowed +``` + +> [!NOTE] +> The resolver runs synchronously. If your lookup requires async I/O (database queries, HTTP calls), pre-load the domain mapping at startup or perform the lookup with a synchronous client. Do not call `asyncio.run()` inside the resolver: the resolver is invoked from within the SDK's async verification path, and `asyncio.run()` raises `RuntimeError` when the event loop is already running. + +--- + +## MCD with DPoP + +MCD works with DPoP authentication. When using `verify_request()`, the SDK handles both MCD domain validation and DPoP proof verification: + +```python +api_client = ApiClient(ApiClientOptions( + domains=["tenant.auth0.com", "auth.example.com"], + audience="https://api.example.com", + dpop_required=True +)) + +claims = await api_client.verify_request( + headers={ + "authorization": "DPoP eyJ0eXAiOiJKV1Q...", + "dpop": "eyJ0eXAiOiJkcG9wK2p3dC..." + }, + http_method="GET", + http_url="https://api.example.com/resource" +) +``` + +The verification order is: extract issuer from token -> validate issuer against allowed domains -> perform OIDC discovery from the token's issuer -> verify token signature -> verify DPoP proof. + +--- + +## Error Handling + +### Configuration Errors + +Raised at initialization when the SDK configuration is invalid: + +```python +from auth0_api_python import ApiClient, ApiClientOptions, ConfigurationError + +# Neither domain nor domains provided +try: + api_client = ApiClient(ApiClientOptions(audience="https://api.example.com")) +except ConfigurationError as e: + print(e) # "Must provide either 'domain' or 'domains' parameter..." + +# Empty domains list +try: + api_client = ApiClient(ApiClientOptions(domains=[], audience="https://api.example.com")) +except ConfigurationError as e: + print(e) # "domains list cannot be empty" + +# Invalid domains type +try: + api_client = ApiClient(ApiClientOptions(domains="not-a-list", audience="https://api.example.com")) +except ConfigurationError as e: + print(e) # "domains must be either a list of domain strings or a callable..." 
+``` + +### Resolver Errors + +Raised when the dynamic resolver function fails: + +```python +from auth0_api_python import DomainsResolverError +from auth0_api_python.errors import VerifyAccessTokenError + +# Resolver raises an exception +try: + claims = await api_client.verify_access_token(token) +except DomainsResolverError as e: + print(e) # "Domains resolver function failed: " + e.get_status_code() # 500 + e.get_error_code() # "domains_resolver_error" + +# Resolver returns invalid type or empty list +except DomainsResolverError as e: + print(e) # "Domains resolver must return a list" or "Domains resolver returned an empty list" +``` + +### Issuer Rejection + +Raised when a token's issuer is not in the allowed domains: + +```python +try: + claims = await api_client.verify_access_token(token) +except VerifyAccessTokenError as e: + print(e) # "Token issuer is not in the list of allowed domains" + e.get_status_code() # 401 + e.get_error_code() # "invalid_token" + e.get_headers() # {"WWW-Authenticate": "Bearer error=\"invalid_token\", ..."} +``` + +--- + +## Migration Guide + +### Single Domain to MCD + +Migrate from a single Auth0 domain to multiple custom domains with zero downtime: + +```python +# Phase 1: Start with single domain (current state) +client = ApiClient(ApiClientOptions( + domain="tenant.auth0.com", + audience="https://api.example.com" +)) + +# Phase 2: Add new domain alongside existing (during migration) +# Tokens from both domains are now accepted +client = ApiClient(ApiClientOptions( + domain="tenant.auth0.com", + domains=["tenant.auth0.com", "auth.newdomain.com"], + audience="https://api.example.com" +)) + +# Phase 3: Full MCD with all domains (after migration) +client = ApiClient(ApiClientOptions( + domain="tenant.auth0.com", + domains=["tenant.auth0.com", "auth.newdomain.com", "auth.other.com"], + audience="https://api.example.com" +)) +``` + +### Rollback from MCD + +To revert to single domain, remove the `domains` parameter: + +```python +# 
Rollback: only the configured domain's tokens are accepted +client = ApiClient(ApiClientOptions( + domain="tenant.auth0.com", + audience="https://api.example.com" +)) +``` + +--- + +## Configuration Reference + +| Parameter | Type | Default | Description | +|---|---|---|---| +| `domain` | `str` | `None` | Single Auth0 domain. Used for client-initiated flows (token exchange, connection tokens) and single-domain verification. | +| `domains` | `list[str]` or `callable` | `None` | List of allowed domains or a resolver function. Used for token verification in MCD mode. | +| `cache_ttl_seconds` | `int` | `600` | Maximum TTL for cached discovery metadata and JWKS (seconds). The effective TTL is `min(server_max_age, cache_ttl_seconds)`. | +| `cache_max_entries` | `int` | `100` | Maximum entries per cache before LRU eviction. Each issuer uses one discovery entry and one JWKS entry. | +| `cache_adapter` | `CacheAdapter` | `None` | Custom cache backend. See [Caching Guide](Caching.md) for details. | + +At least one of `domain` or `domains` must be provided. When both are provided, `domains` is used for token verification and `domain` is used for client-initiated flows. + +### Domain Normalization + +Domains are normalized automatically. 
All of these are equivalent: + +- `"tenant.auth0.com"` +- `"TENANT.AUTH0.COM"` +- `"https://tenant.auth0.com"` +- `"https://tenant.auth0.com/"` +- `" tenant.auth0.com "` diff --git a/poetry.lock b/poetry.lock index 6b1aaa2..a825c13 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,6 +7,7 @@ description = "URL parser and manipulator based on the WHAT WG URL standard" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_full_version < \"3.14.0\" or platform_python_implementation == \"PyPy\"" files = [ {file = "ada_url-1.27.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:00d4da718deb454c4a5a0dc4ebcb3a98c63162654027571a504cce36ff79da66"}, {file = "ada_url-1.27.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:686e3d43bd2d6f8be2d812ddb3e165a0f449018d00a68ca7df5a9d41f89efb44"}, @@ -64,37 +65,88 @@ files = [ [package.dependencies] cffi = "*" +[[package]] +name = "ada-url" +version = "1.28.0" +description = "URL parser and manipulator based on the WHAT WG URL standard" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_full_version >= \"3.14.0\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "ada_url-1.28.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:6a1ba88ff019cfd9431cc1bfd2f3e09a52cc5e34a3a7ea210e7c481dfc6a9c66"}, + {file = "ada_url-1.28.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:dcdd671f21185ff08e5ca1cd43c89cfe4679082653533a21fbbf0bca48d997e8"}, + {file = "ada_url-1.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ac391be8ca9087a8adb20d077fefecfbd46581c84c232e75b96387b85d1cc17"}, + {file = "ada_url-1.28.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2945d5f27d1e3fba8cde2e715de5f379b469525397d0893e23764b0d3c208bf7"}, + {file = "ada_url-1.28.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:72ff8021b370736781063bda20ba448b3ad22201474f0ff5ef1ee4685a897df5"}, + {file = 
"ada_url-1.28.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c10aa990dc76f01624083368ab518ab0a4d5ff5c38b259400e367ebd214cdcbf"}, + {file = "ada_url-1.28.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dface5370c9de3f649bfd4a0f481d4a00a2cf88926419d3aed65d5dde988e58b"}, + {file = "ada_url-1.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:cbd444a27a3898f359608a486cee3d6013341cc81c7252d66ddc018a118980a7"}, + {file = "ada_url-1.28.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:add17b8d1d3cefa819c35f6d7a5da8ea31b14c4e7568cfe0e9ad9b9c9221fd0c"}, + {file = "ada_url-1.28.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:9f7944503f237c2baae9da507a0796772fe18f92b7c864827b08beb71094a711"}, + {file = "ada_url-1.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5953813104dfa1d9e0b2bf9619bb4fbe69a127a6a90c4b8cc9d56c46d66a419b"}, + {file = "ada_url-1.28.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d53006d0fd97e34e013e14997f882717124b08721a39b004650a68447adba48"}, + {file = "ada_url-1.28.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:918b66b7a108c0f4cf9085c7ff8598b3e49aee1fb622a0e2101d08b1d32aaf4e"}, + {file = "ada_url-1.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d6aaf0eb389b15a1e219b0c9a07d1996dca75f5ed021cd22b6800328a91c71a"}, + {file = "ada_url-1.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6523b121d4b0252e7eb2febcf1d8487f01655c51924709657538751eba4aa142"}, + {file = "ada_url-1.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:7a75c0fc288cfa1a67164eef907f834d6d9c039d6270133be3c56055019f44be"}, + {file = "ada_url-1.28.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:4818a86e7d9731206d44695032baa5928d63d6cebd62435af3a5efe07d3b596a"}, + {file = "ada_url-1.28.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2c8b7fce157d2dac2636f649bf8239d8a372ec4b79c3d16dde781f3a9c7584b3"}, + {file = 
"ada_url-1.28.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:066f604d43b8a3c39bd8089740526f9580d8c8292aaa5b94846c4180aceead51"}, + {file = "ada_url-1.28.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a0424184a215b95bd86e661e7ac5085eb86a5b06454fc582dcb520e771bc9949"}, + {file = "ada_url-1.28.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90c3b10b9165a5f2e325d1426bb8ed1fe35b4070af54669c26bae5860795b4d7"}, + {file = "ada_url-1.28.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4b8346bdc3e5bc94f5977e2151491bd49390dbe33a03a55fb39d16e180e83f4"}, + {file = "ada_url-1.28.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:859e1a9237fb3020e998d7a80b629e9c37439f4c2642f28e4d708a9e669e972a"}, + {file = "ada_url-1.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:902696d75b46643803aa4b5c723fa743f35eceed9468cad16881f0f7c9870fa9"}, + {file = "ada_url-1.28.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:212f8737210bd375a366a09dce74f46baae9bcba9d84d10cd12c564bd0bd80e3"}, + {file = "ada_url-1.28.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:742415d1c283e4f813b34e1b3a7b8a8763da27064433d069a371440f59eccaa2"}, + {file = "ada_url-1.28.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9455bf4e3335732696eade2f6b8bdd029e01a64a722ac009607adb63eb7fa951"}, + {file = "ada_url-1.28.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb7d8c9cedb6044a66a6bfa08cf9b9883c6746ec94fb279d77c1267e127c0aef"}, + {file = "ada_url-1.28.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f0f3f39e0a2195d36c4892bc4a8dc6416fa1d2c942444c672a9bd031c575134"}, + {file = "ada_url-1.28.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2ecde1d69407c8966814cf8638c3ae27a519927326e112dbbe7a8fddc7e62dd5"}, + {file = "ada_url-1.28.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9350c0fc398029a9d204946fead03370d6d75d1715268c41fcaf888e85c85dde"}, + 
{file = "ada_url-1.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:c85d531dcd7845d0cb1ba437f5741a2bda9ff6b8f99c21d003eecb5c36b06533"}, + {file = "ada_url-1.28.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8aff37a33027043d3528c39900be10905d42c6c950c8313ac93779661cff558a"}, + {file = "ada_url-1.28.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8bd4beec34721f028b393d7e03793d4c8890f5088c2a95047c339e36aeaa0df"}, + {file = "ada_url-1.28.0-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92869a9663b3bbd49565342d437c5b11accb88a2cd9e3c66ad46196e2c53ed28"}, + {file = "ada_url-1.28.0-pp310-pypy310_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98f9b2cc5853e572b0b97cf17eabd5d39d0330db019a7f0e12d8458e1c19fe6c"}, + {file = "ada_url-1.28.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:beaeec2f101dd21fa32fcb335b7bcb933da9809ec7be90703380b0d739b223a8"}, + {file = "ada_url-1.28.0.tar.gz", hash = "sha256:ff2115679335f698da64e846913061cbb3064de35f1bea5d8f8e5c1b87756702"}, +] + +[package.dependencies] +cffi = "*" + [[package]] name = "anyio" -version = "4.11.0" +version = "4.12.1" description = "High-level concurrency and networking framework on top of asyncio or Trio" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}, - {file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}, + {file = "anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c"}, + {file = "anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" -sniffio = ">=1.1" typing_extensions = {version = ">=4.5", 
markers = "python_version < \"3.13\""} [package.extras] -trio = ["trio (>=0.31.0)"] +trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] [[package]] name = "authlib" -version = "1.6.5" +version = "1.6.6" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"}, - {file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"}, + {file = "authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd"}, + {file = "authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e"}, ] [package.dependencies] @@ -102,14 +154,14 @@ cryptography = "*" [[package]] name = "certifi" -version = "2025.10.5" +version = "2026.1.4" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.7" groups = ["main", "dev"] files = [ - {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, - {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, ] [[package]] @@ -352,6 +404,7 @@ description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_full_version < \"3.14.0\" or platform_python_implementation == \"PyPy\"" files = [ {file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}, {file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}, @@ -465,6 +518,112 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +[[package]] +name = "coverage" +version = "7.13.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_full_version >= \"3.14.0\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147"}, + {file = "coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29"}, + {file = "coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f"}, + {file = "coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1"}, + {file = "coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88"}, + {file = "coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9"}, + {file = 
"coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba"}, + {file = "coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19"}, + {file = "coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a"}, + {file = "coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c"}, + {file = "coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3"}, + {file = "coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c"}, + {file = "coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7"}, + {file = "coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6"}, + {file = "coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c"}, + {file = "coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78"}, + {file = "coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b"}, + {file = 
"coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784"}, + {file = "coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461"}, + {file = "coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500"}, + {file = "coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9"}, + {file = "coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc"}, + {file = "coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53"}, + {file = "coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842"}, + {file = "coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2"}, + {file = "coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09"}, + {file = "coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894"}, + {file = 
"coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9"}, + {file = "coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5"}, + {file = "coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a"}, + {file = "coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0"}, + {file = "coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = 
"sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a"}, + {file = "coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416"}, + {file = "coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f"}, + {file = "coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79"}, + {file = "coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4"}, + {file = 
"coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573"}, + {file = "coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + [[package]] name = "cryptography" version = "43.0.3" @@ -472,6 +631,7 @@ description = "cryptography is a package which provides cryptographic recipes an optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_full_version < \"3.14.0\" or platform_python_implementation == \"PyPy\"" files = [ {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, @@ -515,17 +675,95 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "cryptography" +version = "46.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["main"] +markers = "python_full_version >= \"3.14.0\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926"}, + {file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71"}, + {file = 
"cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac"}, + {file = "cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018"}, + {file = "cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb"}, + {file = "cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c"}, + {file = "cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = 
"sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3"}, + {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20"}, + {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de"}, + {file = "cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914"}, + {file = "cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db"}, + {file = "cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21"}, + {file = "cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3"}, + {file = 
"cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506"}, + {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963"}, + {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4"}, + {file = "cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df"}, + {file = "cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f"}, + {file = "cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372"}, + {file = "cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32"}, + {file = "cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b"}, + {file = 
"cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c"}, + {file = "cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1"}, +] + +[package.dependencies] +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main", "dev"] markers = "python_version < \"3.11\"" files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, + 
{file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, ] [package.dependencies] @@ -630,11 +868,25 @@ description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_full_version < \"3.14.0\" or platform_python_implementation == \"PyPy\"" files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "iniconfig" +version = "2.3.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_full_version >= \"3.14.0\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + [[package]] name = "packaging" version = "25.0" @@ -670,12 +922,25 @@ description = "C parser in Python" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "implementation_name != \"PyPy\"" +markers = "(python_full_version < \"3.14.0\" or platform_python_implementation == \"PyPy\") and implementation_name != \"PyPy\"" files = [ {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] +[[package]] +name = "pycparser" +version = "3.0" +description = "C parser in Python" +optional = false 
+python-versions = ">=3.10" +groups = ["main"] +markers = "python_full_version >= \"3.14.0\" and platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" +files = [ + {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, + {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, +] + [[package]] name = "pygments" version = "2.19.2" @@ -829,31 +1094,31 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.14.1" +version = "0.14.13" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.14.1-py3-none-linux_armv6l.whl", hash = "sha256:083bfc1f30f4a391ae09c6f4f99d83074416b471775b59288956f5bc18e82f8b"}, - {file = "ruff-0.14.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f6fa757cd717f791009f7669fefb09121cc5f7d9bd0ef211371fad68c2b8b224"}, - {file = "ruff-0.14.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6191903d39ac156921398e9c86b7354d15e3c93772e7dbf26c9fcae59ceccd5"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed04f0e04f7a4587244e5c9d7df50e6b5bf2705d75059f409a6421c593a35896"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c9e6cf6cd4acae0febbce29497accd3632fe2025c0c583c8b87e8dbdeae5f61"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fa2458527794ecdfbe45f654e42c61f2503a230545a91af839653a0a93dbc6"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:39f1c392244e338b21d42ab29b8a6392a722c5090032eb49bb4d6defcdb34345"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7382fa12a26cce1f95070ce450946bec357727aaa428983036362579eadcc5cf"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0bf2be3ae8521e1093a487c4aa3b455882f139787770698530d28ed3fbb37c"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabcaa9ccf8089fb4fdb78d17cc0e28241520f50f4c2e88cb6261ed083d85151"}, - {file = "ruff-0.14.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:747d583400f6125ec11a4c14d1c8474bf75d8b419ad22a111a537ec1a952d192"}, - {file = "ruff-0.14.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5a6e74c0efd78515a1d13acbfe6c90f0f5bd822aa56b4a6d43a9ffb2ae6e56cd"}, - {file = "ruff-0.14.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0ea6a864d2fb41a4b6d5b456ed164302a0d96f4daac630aeba829abfb059d020"}, - {file = "ruff-0.14.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0826b8764f94229604fa255918d1cc45e583e38c21c203248b0bfc9a0e930be5"}, - {file = "ruff-0.14.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cbc52160465913a1a3f424c81c62ac8096b6a491468e7d872cb9444a860bc33d"}, - {file = "ruff-0.14.1-py3-none-win32.whl", hash = "sha256:e037ea374aaaff4103240ae79168c0945ae3d5ae8db190603de3b4012bd1def6"}, - {file = "ruff-0.14.1-py3-none-win_amd64.whl", hash = "sha256:59d599cdff9c7f925a017f6f2c256c908b094e55967f93f2821b1439928746a1"}, - {file = "ruff-0.14.1-py3-none-win_arm64.whl", hash = "sha256:e3b443c4c9f16ae850906b8d0a707b2a4c16f8d2f0a7fe65c475c5886665ce44"}, - {file = "ruff-0.14.1.tar.gz", hash = "sha256:1dd86253060c4772867c61791588627320abcb6ed1577a90ef432ee319729b69"}, + {file = "ruff-0.14.13-py3-none-linux_armv6l.whl", hash = "sha256:76f62c62cd37c276cb03a275b198c7c15bd1d60c989f944db08a8c1c2dbec18b"}, + {file = "ruff-0.14.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:914a8023ece0528d5cc33f5a684f5f38199bbb566a04815c2c211d8f40b5d0ed"}, + {file = "ruff-0.14.13-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:d24899478c35ebfa730597a4a775d430ad0d5631b8647a3ab368c29b7e7bd063"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aaf3870f14d925bbaf18b8a2347ee0ae7d95a2e490e4d4aea6813ed15ebc80e"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac5b7f63dd3b27cc811850f5ffd8fff845b00ad70e60b043aabf8d6ecc304e09"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d2b1097750d90ba82ce4ba676e85230a0ed694178ca5e61aa9b459970b3eb9"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d0bf87705acbbcb8d4c24b2d77fbb73d40210a95c3903b443cd9e30824a5032"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3eb5da8e2c9e9f13431032fdcbe7681de9ceda5835efee3269417c13f1fed5c"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:642442b42957093811cd8d2140dfadd19c7417030a7a68cf8d51fcdd5f217427"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4acdf009f32b46f6e8864af19cbf6841eaaed8638e65c8dac845aea0d703c841"}, + {file = "ruff-0.14.13-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:591a7f68860ea4e003917d19b5c4f5ac39ff558f162dc753a2c5de897fd5502c"}, + {file = "ruff-0.14.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:774c77e841cc6e046fc3e91623ce0903d1cd07e3a36b1a9fe79b81dab3de506b"}, + {file = "ruff-0.14.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:61f4e40077a1248436772bb6512db5fc4457fe4c49e7a94ea7c5088655dd21ae"}, + {file = "ruff-0.14.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6d02f1428357fae9e98ac7aa94b7e966fd24151088510d32cf6f902d6c09235e"}, + {file = "ruff-0.14.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e399341472ce15237be0c0ae5fbceca4b04cd9bebab1a2b2c979e015455d8f0c"}, + {file = "ruff-0.14.13-py3-none-win32.whl", hash = 
"sha256:ef720f529aec113968b45dfdb838ac8934e519711da53a0456038a0efecbd680"}, + {file = "ruff-0.14.13-py3-none-win_amd64.whl", hash = "sha256:6070bd026e409734b9257e03e3ef18c6e1a216f0435c6751d7a8ec69cb59abef"}, + {file = "ruff-0.14.13-py3-none-win_arm64.whl", hash = "sha256:7ab819e14f1ad9fe39f246cfcc435880ef7a9390d81a2b6ac7e01039083dd247"}, + {file = "ruff-0.14.13.tar.gz", hash = "sha256:83cd6c0763190784b99650a20fec7633c59f6ebe41c5cc9d45ee42749563ad47"}, ] [[package]] @@ -868,69 +1133,62 @@ files = [ {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - [[package]] name = "tomli" -version = "2.3.0" +version = "2.4.0" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev"] markers = "python_full_version <= \"3.11.0a6\"" files = [ - {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, - {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, - {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, - {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, - {file = 
"tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, - {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, - {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, - {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, - {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, - {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, - {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, - {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, - {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, - {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, - {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, - {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, - {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, - {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, - {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, - {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, - {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, - {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, - {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, - {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, - {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, - {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, - {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, - {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, - {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, - {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, - {file = 
"tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, - {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, - {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, - {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, - {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, - {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, - {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, - {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, - {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, - {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, - {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, - {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, + {file = "tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867"}, + {file = "tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576"}, + {file = "tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a"}, + {file = "tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa"}, + {file = "tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1"}, + {file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b"}, + {file = 
"tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51"}, + {file = "tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729"}, + {file = "tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da"}, + {file = "tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3"}, + {file = "tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0"}, + {file = "tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f"}, + {file = "tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86"}, + {file = "tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87"}, + {file = "tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = 
"sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d"}, + {file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702"}, + {file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8"}, + {file = "tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776"}, + {file = "tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475"}, + {file = "tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d"}, + {file = 
"tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f"}, + {file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b"}, + {file = "tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087"}, + {file = "tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd"}, + {file = "tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4"}, + {file = "tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a"}, + {file = "tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c"}, ] [[package]] @@ -948,21 +1206,21 @@ files = [ [[package]] name = "urllib3" -version = "2.5.0" +version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, - {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, + {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, + {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [metadata] lock-version = "2.1" diff --git a/src/auth0_api_python/__init__.py b/src/auth0_api_python/__init__.py index 20faa8c..ef27ea2 100644 --- a/src/auth0_api_python/__init__.py +++ b/src/auth0_api_python/__init__.py @@ -6,12 +6,25 @@ """ from .api_client import ApiClient +from .cache import CacheAdapter, InMemoryCache from .config import ApiClientOptions -from .errors import ApiError, GetTokenByExchangeProfileError +from .errors import ( + ApiError, + ConfigurationError, + DomainsResolverError, + GetTokenByExchangeProfileError, +) +from .types import DomainsResolver, DomainsResolverContext __all__ = [ "ApiClient", "ApiClientOptions", "ApiError", - "GetTokenByExchangeProfileError" + "CacheAdapter", + "ConfigurationError", + "DomainsResolver", + "DomainsResolverContext", + "DomainsResolverError", + "GetTokenByExchangeProfileError", + "InMemoryCache", ] diff --git a/src/auth0_api_python/api_client.py b/src/auth0_api_python/api_client.py 
index d9faafb..8b22974 100644 --- a/src/auth0_api_python/api_client.py +++ b/src/auth0_api_python/api_client.py @@ -5,10 +5,13 @@ import httpx from authlib.jose import JsonWebKey, JsonWebToken +from .cache import InMemoryCache from .config import ApiClientOptions from .errors import ( ApiError, BaseAuthError, + ConfigurationError, + DomainsResolverError, GetAccessTokenForConnectionError, GetTokenByExchangeProfileError, InvalidAuthSchemeError, @@ -22,6 +25,8 @@ fetch_jwks, fetch_oidc_metadata, get_unverified_header, + get_unverified_payload, + normalize_domain, normalize_url_for_htu, sha256_base64url, ) @@ -48,14 +53,46 @@ class ApiClient: """ def __init__(self, options: ApiClientOptions): - if not options.domain: - raise MissingRequiredArgumentError("domain") + # Validate audience is always required if not options.audience: raise MissingRequiredArgumentError("audience") + + # Validate domains parameter if provided + if options.domains is not None: + if isinstance(options.domains, list): + # Static list validation + if len(options.domains) == 0: + raise ConfigurationError("domains list cannot be empty") + # Normalize and store domains + self._allowed_domains = [normalize_domain(d) for d in options.domains] + elif callable(options.domains): + # Dynamic resolver - store the function + self._allowed_domains = options.domains + else: + raise ConfigurationError( + "domains must be either a list of domain strings or a callable resolver function" + ) + else: + # Single domain mode + self._allowed_domains = None + + # Validate domain/domains configuration + if not options.domain and not options.domains: + raise ConfigurationError( + "Must provide either 'domain' or 'domains' parameter. " + "Use 'domain' for single-domain mode, 'domains' for multi-domain support." 
+ ) self.options = options - self._metadata: Optional[dict[str, Any]] = None - self._jwks_data: Optional[dict[str, Any]] = None + + if options.cache_adapter: + self._discovery_cache = options.cache_adapter + self._jwks_cache = options.cache_adapter + else: + self._discovery_cache = InMemoryCache(max_entries=options.cache_max_entries) + self._jwks_cache = InMemoryCache(max_entries=options.cache_max_entries) + + self._cache_ttl = options.cache_ttl_seconds self._jwt = JsonWebToken(["RS256"]) @@ -66,6 +103,80 @@ def is_dpop_required(self) -> bool: """Check if DPoP authentication is required.""" return getattr(self.options, "dpop_required", False) + async def _resolve_allowed_domains( + self, + unverified_iss: str, + request_url: Optional[str] = None, + request_headers: Optional[dict] = None + ) -> list[str]: + """ + Resolve and validate allowed domains for the given issuer. + + Handles three modes: + 1. Static list: Returns normalized list, validates issuer against it + 2. Dynamic resolver: Invokes resolver function, validates issuer against result + 3. 
Single domain: Returns None (backward compatibility, uses domain) + + Args: + unverified_iss: The issuer claim from the token (not yet verified) + request_url: Optional request URL for dynamic resolvers + request_headers: Optional request headers for dynamic resolvers + + Returns: + List of normalized allowed domain strings + + Raises: + DomainsResolverError: If resolver invocation fails + VerifyAccessTokenError: If issuer is not in allowed domains + """ + # Single domain mode + if self._allowed_domains is None: + return None + + # Static list mode + if isinstance(self._allowed_domains, list): + allowed_domains = self._allowed_domains + # Dynamic resolver mode + elif callable(self._allowed_domains): + # Build resolver context + context = { + 'request_url': request_url, + 'request_headers': request_headers, + 'unverified_iss': unverified_iss + } + + # Invoke resolver + try: + result = self._allowed_domains(context) + except Exception as e: + raise DomainsResolverError( + f"Domains resolver function failed: {str(e)}" + ) from e + + # Validate resolver result + if not isinstance(result, list): + raise DomainsResolverError( + "Domains resolver must return a list" + ) + + if len(result) == 0: + raise DomainsResolverError( + "Domains resolver returned an empty list" + ) + + # Normalize domains from resolver + allowed_domains = [normalize_domain(d) for d in result] + else: + # Should never happen due to __init__ validation + raise ConfigurationError("Invalid _allowed_domains type") + + # Validate issuer is in allowed domains + if unverified_iss not in allowed_domains: + raise VerifyAccessTokenError( + "Token issuer is not in the list of allowed domains" + ) + + return allowed_domains async def verify_request( self, @@ -89,7 +200,7 @@ async def verify_request( - "authorization": The Authorization header value (required) - "dpop": The DPoP proof header value (required for DPoP) http_method: The HTTP method (required for DPoP) - http_url: The HTTP URL (required for DPoP) + 
http_url: The HTTP URL (required for DPoP, also used for MCD resolver context) Returns: The decoded access token claims @@ -171,7 +282,11 @@ async def verify_request( ) try: - access_token_claims = await self.verify_access_token(token) + access_token_claims = await self.verify_access_token( + token, + request_url=http_url, + request_headers=headers + ) except VerifyAccessTokenError as e: raise self._prepare_error(e, auth_scheme=scheme) @@ -219,7 +334,11 @@ async def verify_request( if scheme == "bearer": try: - claims = await self.verify_access_token(token) + claims = await self.verify_access_token( + token, + request_url=http_url, + request_headers=headers + ) if claims.get("cnf") and isinstance(claims["cnf"], dict) and claims["cnf"].get("jkt"): if self.options.dpop_enabled: raise self._prepare_error( @@ -245,6 +364,8 @@ async def verify_request( async def verify_access_token( self, access_token: str, + request_url: Optional[str] = None, + request_headers: Optional[dict] = None, required_claims: Optional[list[str]] = None ) -> dict[str, Any]: """ @@ -255,25 +376,107 @@ async def verify_access_token( - Checks standard claims: 'iss', 'aud', 'exp', 'iat' - Checks extra required claims if 'required_claims' is provided. + Args: + access_token: The JWT access token to verify + request_url: Optional request URL for dynamic domain resolvers + request_headers: Optional request headers dict for dynamic domain resolvers + required_claims: Optional list of additional claim names that must be present + Returns: The decoded token claims if valid. Raises: MissingRequiredArgumentError: If no token is provided. VerifyAccessTokenError: If verification fails (signature, claims mismatch, etc.). + DomainsResolverError: If domains resolver function fails. 
""" if not access_token: raise MissingRequiredArgumentError("access_token") required_claims = required_claims or [] + # Extract header and payload without signature verification try: header = get_unverified_header(access_token) - kid = header["kid"] except Exception as e: raise VerifyAccessTokenError(f"Failed to parse token header: {str(e)}") from e - jwks_data = await self._load_jwks() + # Reject symmetric algorithms + alg = header.get('alg', '') + if alg.startswith('HS'): + raise VerifyAccessTokenError( + f"Symmetric algorithm '{alg}' is not supported. " + "Only asymmetric algorithms (e.g., RS256) are allowed." + ) + + # Extract and validate issuer claim (before network calls) + try: + unverified_payload = get_unverified_payload(access_token) + except Exception as e: + raise VerifyAccessTokenError(f"Failed to parse token payload: {str(e)}") from e + + unverified_iss = unverified_payload.get('iss') + if not unverified_iss: + raise VerifyAccessTokenError("Token missing 'iss' claim") + + # Normalize issuer for validation + normalized_iss = normalize_domain(unverified_iss) + + # Validate issuer against allowed domains (MCD) + if self._allowed_domains is not None: + await self._resolve_allowed_domains( + normalized_iss, + request_url=request_url, + request_headers=request_headers + ) + + # Fetch OIDC discovery metadata + try: + if self._allowed_domains is not None: + metadata = await self._discover(issuer=normalized_iss) + else: + metadata = await self._discover() + except VerifyAccessTokenError: + raise + except Exception as e: + raise VerifyAccessTokenError( + f"Failed to fetch OIDC discovery metadata: {str(e)}" + ) from e + + # First issuer validation: Prevent issuer confusion attacks + discovery_issuer = metadata.get("issuer") + if not discovery_issuer: + raise VerifyAccessTokenError("Discovery metadata missing 'issuer' field") + + # Normalize discovery issuer for comparison + normalized_discovery_issuer = normalize_domain(discovery_issuer) + + if normalized_iss 
!= normalized_discovery_issuer: + raise VerifyAccessTokenError( + "Token issuer does not match the discovery issuer" + ) + + # Extract JWKS URI from discovery metadata + jwks_uri = metadata.get("jwks_uri") + if not jwks_uri: + raise VerifyAccessTokenError("Discovery metadata missing 'jwks_uri' field") + + # Fetch JWKS from discovery's jwks_uri + try: + jwks_data = await self._fetch_jwks(jwks_uri) + except VerifyAccessTokenError: + raise + except Exception as e: + raise VerifyAccessTokenError( + f"Failed to fetch JWKS: {str(e)}" + ) from e + + # Extract kid for JWKS lookup + kid = header.get("kid") + if not kid: + raise VerifyAccessTokenError("Token header missing 'kid' claim") + + # Find matching key matching_key_dict = None for key_dict in jwks_data["keys"]: if key_dict.get("kid") == kid: @@ -281,8 +484,9 @@ async def verify_access_token( break if not matching_key_dict: - raise VerifyAccessTokenError(f"No matching key found for kid: {kid}") + raise VerifyAccessTokenError("No matching key found in JWKS") + # Import public key and verify signature public_key = JsonWebKey.import_key(matching_key_dict) if isinstance(access_token, str) and access_token.startswith("b'"): @@ -292,11 +496,11 @@ async def verify_access_token( except Exception as e: raise VerifyAccessTokenError(f"Signature verification failed: {str(e)}") from e - metadata = await self._discover() - issuer = metadata["issuer"] - - if claims.get("iss") != issuer: - raise VerifyAccessTokenError("Issuer mismatch") + # Second issuer validation: Ensure verified token wasn't tampered + if claims.get("iss") != discovery_issuer: + raise VerifyAccessTokenError( + "Verified Token issuer does not match the discovery issuer" + ) expected_aud = self.options.audience actual_aud = claims.get("aud") @@ -767,25 +971,73 @@ def _apply_extra( else: params[key] = str(v) - async def _discover(self) -> dict[str, Any]: - """Lazy-load OIDC discovery metadata.""" - if self._metadata is None: - self._metadata = await 
fetch_oidc_metadata( - domain=self.options.domain, - custom_fetch=self.options.custom_fetch - ) - return self._metadata - - async def _load_jwks(self) -> dict[str, Any]: - """Fetches and caches JWKS data from the OIDC metadata.""" - if self._jwks_data is None: - metadata = await self._discover() - jwks_uri = metadata["jwks_uri"] - self._jwks_data = await fetch_jwks( - jwks_uri=jwks_uri, - custom_fetch=self.options.custom_fetch - ) - return self._jwks_data + async def _discover(self, issuer: Optional[str] = None) -> dict[str, Any]: + """ + Lazy-load OIDC discovery metadata. + + Args: + issuer: Optional issuer URL to fetch discovery from (MCD mode). + If provided, extracts domain from issuer URL. + If None, uses configured domain. + + Returns: + OIDC discovery metadata dictionary + """ + if issuer: + domain = issuer.replace('https://', '').replace('http://', '').rstrip('/') + else: + domain = self.options.domain + + cache_key = normalize_domain(f"https://{domain}") + + cached = self._discovery_cache.get(cache_key) + if cached: + return cached + + metadata, max_age = await fetch_oidc_metadata( + domain=domain, + custom_fetch=self.options.custom_fetch + ) + + effective_ttl = self._cache_ttl + if max_age is not None and self._cache_ttl is not None: + effective_ttl = min(max_age, self._cache_ttl) + elif max_age is not None: + effective_ttl = max_age + + self._discovery_cache.set(cache_key, metadata, ttl_seconds=effective_ttl) + return metadata + + async def _fetch_jwks(self, jwks_uri: str) -> dict[str, Any]: + """ + Fetch JWKS with per-URI caching. 
+ + Args: + jwks_uri: The JWKS URI to fetch from + + Returns: + JWKS data dictionary + + """ + cache_key = jwks_uri + + cached = self._jwks_cache.get(cache_key) + if cached: + return cached + + jwks_data, max_age = await fetch_jwks( + jwks_uri=jwks_uri, + custom_fetch=self.options.custom_fetch + ) + + effective_ttl = self._cache_ttl + if max_age is not None and self._cache_ttl is not None: + effective_ttl = min(max_age, self._cache_ttl) + elif max_age is not None: + effective_ttl = max_age + + self._jwks_cache.set(cache_key, jwks_data, ttl_seconds=effective_ttl) + return jwks_data def _validate_claims_presence( self, diff --git a/src/auth0_api_python/cache.py b/src/auth0_api_python/cache.py new file mode 100644 index 0000000..2787709 --- /dev/null +++ b/src/auth0_api_python/cache.py @@ -0,0 +1,164 @@ +from abc import ABC, abstractmethod +from typing import Optional, Any +from datetime import datetime, timedelta + + +class CacheAdapter(ABC): + """ + Abstract base class for cache implementations. + + Allows custom cache backends (Redis, Memcached, etc.) to be plugged into + the ApiClient for caching OIDC discovery metadata and JWKS. + + Example: + class RedisCache(CacheAdapter): + def __init__(self, redis_client): + self.redis = redis_client + + def get(self, key: str) -> Optional[Any]: + value = self.redis.get(key) + return json.loads(value) if value else None + + def set(self, key: str, value: Any, ttl_seconds: Optional[int] = None) -> None: + self.redis.set(key, json.dumps(value), ex=ttl_seconds) + + def delete(self, key: str) -> None: + self.redis.delete(key) + + def clear(self) -> None: + self.redis.flushdb() + """ + + @abstractmethod + def get(self, key: str) -> Optional[Any]: + """ + Get value from cache by key. 
+ + Args: + key: Cache key to retrieve + + Returns: + Cached value if found and not expired, None otherwise + """ + pass + + @abstractmethod + def set(self, key: str, value: Any, ttl_seconds: Optional[int] = None) -> None: + """ + Set value in cache with optional TTL. + + Args: + key: Cache key to store + value: Value to cache + ttl_seconds: Time-to-live in seconds. None means no expiration. + """ + pass + + @abstractmethod + def delete(self, key: str) -> None: + """ + Delete value from cache. + + Args: + key: Cache key to delete + """ + pass + + @abstractmethod + def clear(self) -> None: + """Clear all cache entries.""" + pass + + +class InMemoryCache(CacheAdapter): + """ + Default in-memory cache implementation with LRU eviction. + + Features: + - TTL (time-to-live) support per entry + - LRU (Least Recently Used) eviction when max_entries reached + - No external dependencies + + Args: + max_entries: Maximum number of entries to cache. When exceeded, + least recently used entry is evicted. Default: 100. + + Example: + cache = InMemoryCache(max_entries=50) + cache.set("key1", {"data": "value"}, ttl_seconds=600) + value = cache.get("key1") # Returns {"data": "value"} + """ + + def __init__(self, max_entries: int = 100): + """ + Initialize in-memory cache. + + Args: + max_entries: Maximum number of cache entries (default: 100) + """ + self._cache: dict[str, tuple[Any, Optional[datetime]]] = {} + self._max_entries = max_entries + + def get(self, key: str) -> Optional[Any]: + """ + Get value from cache by key. + + Updates access order for LRU tracking. 
+ + Args: + key: Cache key to retrieve + + Returns: + Cached value if found and not expired, None otherwise + """ + if key not in self._cache: + return None + + value, expiry = self._cache[key] + + if expiry and datetime.now() > expiry: + del self._cache[key] + return None + + del self._cache[key] + self._cache[key] = (value, expiry) + + return value + + def set(self, key: str, value: Any, ttl_seconds: Optional[int] = None) -> None: + """ + Set value in cache with optional TTL. + + If cache is at max capacity, evicts least recently used entry. + + Args: + key: Cache key to store + value: Value to cache + ttl_seconds: Time-to-live in seconds. None means no expiration. + """ + # If key exists, remove first so reinsert goes to end + if key in self._cache: + del self._cache[key] + elif len(self._cache) >= self._max_entries: + # Evict LRU: first key in dict is oldest + oldest_key = next(iter(self._cache)) + del self._cache[oldest_key] + + expiry = None + if ttl_seconds: + expiry = datetime.now() + timedelta(seconds=ttl_seconds) + + self._cache[key] = (value, expiry) + + def delete(self, key: str) -> None: + """ + Delete value from cache. + + Args: + key: Cache key to delete + """ + self._cache.pop(key, None) + + def clear(self) -> None: + """Clear all cache entries.""" + self._cache.clear() diff --git a/src/auth0_api_python/config.py b/src/auth0_api_python/config.py index 5c312fd..3a9e15c 100644 --- a/src/auth0_api_python/config.py +++ b/src/auth0_api_python/config.py @@ -2,7 +2,10 @@ Configuration classes and utilities for auth0-api-python. """ -from typing import Callable, Optional +from typing import TYPE_CHECKING, Callable, Optional, Union + +if TYPE_CHECKING: + from .cache import CacheAdapter class ApiClientOptions: @@ -10,9 +13,16 @@ class ApiClientOptions: Configuration for the ApiClient. Args: - domain: The Auth0 domain, e.g., "my-tenant.us.auth0.com". + domain: The Auth0 domain for single-domain mode and client flows, + e.g., "my-tenant.us.auth0.com". 
Optional if domains is provided. + domains: List of allowed domains or a resolver function for multi-domain mode. + Can be a static list of domain strings or a callable that returns + allowed domains dynamically. Optional if domain is provided. audience: The expected 'aud' claim in the token. custom_fetch: Optional callable that can replace the default HTTP fetch logic. + cache_adapter: Custom cache implementation. If not provided, uses default InMemoryCache. + cache_ttl_seconds: Time-to-live for cache entries in seconds (default: 600 = 10 minutes). + cache_max_entries: Maximum number of cache entries before LRU eviction (default: 100). dpop_enabled: Whether DPoP is enabled (default: True for backward compatibility). dpop_required: Whether DPoP is required (default: False, allows both Bearer and DPoP). dpop_iat_leeway: Leeway in seconds for DPoP proof iat claim (default: 30). @@ -23,9 +33,13 @@ class ApiClientOptions: """ def __init__( self, - domain: str, - audience: str, + domain: Optional[str] = None, + audience: str = "", + domains: Optional[Union[list[str], Callable[[dict], list[str]]]] = None, custom_fetch: Optional[Callable[..., object]] = None, + cache_adapter: Optional["CacheAdapter"] = None, + cache_ttl_seconds: int = 600, + cache_max_entries: int = 100, dpop_enabled: bool = True, dpop_required: bool = False, dpop_iat_leeway: int = 30, @@ -35,8 +49,12 @@ def __init__( timeout: float = 10.0, ): self.domain = domain + self.domains = domains self.audience = audience self.custom_fetch = custom_fetch + self.cache_adapter = cache_adapter + self.cache_ttl_seconds = cache_ttl_seconds + self.cache_max_entries = cache_max_entries self.dpop_enabled = dpop_enabled self.dpop_required = dpop_required self.dpop_iat_leeway = dpop_iat_leeway diff --git a/src/auth0_api_python/errors.py b/src/auth0_api_python/errors.py index 0b4af64..a0a43d0 100644 --- a/src/auth0_api_python/errors.py +++ b/src/auth0_api_python/errors.py @@ -140,3 +140,23 @@ def get_status_code(self) -> 
int: def get_error_code(self) -> str: return self.code + + +class ConfigurationError(BaseAuthError): + """Error raised when SDK configuration is invalid.""" + + def get_status_code(self) -> int: + return 500 + + def get_error_code(self) -> str: + return "invalid_configuration" + + +class DomainsResolverError(BaseAuthError): + """Error raised when domains resolver function fails.""" + + def get_status_code(self) -> int: + return 500 + + def get_error_code(self) -> str: + return "domains_resolver_error" diff --git a/src/auth0_api_python/types.py b/src/auth0_api_python/types.py new file mode 100644 index 0000000..14ce150 --- /dev/null +++ b/src/auth0_api_python/types.py @@ -0,0 +1,48 @@ +""" +Type definitions for auth0-api-python SDK +""" + +from typing import Callable, Optional, TypedDict + + +class DomainsResolverContext(TypedDict, total=False): + """ + Context passed to domains resolver functions. + + Attributes: + request_url: The URL the API request was made to (optional) + request_headers: Request headers dict (e.g., Host, X-Forwarded-Host) (optional) + unverified_iss: The issuer claim from the unverified token (required) + """ + request_url: Optional[str] + request_headers: Optional[dict] + unverified_iss: str # This is required, others are optional + + +DomainsResolver = Callable[[DomainsResolverContext], list[str]] +""" +Type alias for domains resolver function. + +A DomainsResolver is a function that receives a DomainsResolverContext and returns +a list of allowed domain strings. 
+ +Args: + context (DomainsResolverContext): Dictionary containing: + - 'request_url' (str | None): The URL the API request was made to + - 'request_headers' (dict | None): Request headers (e.g., Host, X-Forwarded-Host) + - 'unverified_iss' (str): The issuer claim from the unverified token + +Returns: + list[str]: List of allowed domain strings (e.g., ['tenant.auth0.com']) + +Example: + from auth0_api_python import DomainsResolverContext + + def my_resolver(context: DomainsResolverContext) -> list[str]: + unverified_iss = context['unverified_iss'] + request_url = context.get('request_url') + request_headers = context.get('request_headers') + + # Fetch allowed domains based on context + return ['tenant1.auth0.com', 'tenant2.auth0.com'] +""" diff --git a/src/auth0_api_python/utils.py b/src/auth0_api_python/utils.py index 4ab8051..c499ed0 100644 --- a/src/auth0_api_python/utils.py +++ b/src/auth0_api_python/utils.py @@ -7,57 +7,123 @@ import hashlib import json import re -from typing import Any, Callable, Optional, Union +from typing import Any, Callable, Mapping, Optional, Union import httpx from ada_url import URL +def parse_cache_control_max_age(headers: Mapping[str, str]) -> Optional[int]: + """ + Parse the max-age directive from a Cache-Control HTTP header. + + Args: + headers: HTTP response headers (dict-like, supports case-insensitive + access for httpx Headers objects) + + Returns: + max-age value in seconds, or None if not present or unparseable + """ + cache_control = headers.get("cache-control") or headers.get("Cache-Control") + if not cache_control: + return None + + for directive in cache_control.split(","): + directive = directive.strip().lower() + if directive.startswith("max-age="): + try: + value = int(directive[8:].strip()) + return value if value >= 0 else None + except ValueError: + return None + + return None + + +def normalize_domain(domain: str) -> str: + """ + Normalize a domain string to a standard issuer URL format. 
+ + Args: + domain: Domain string in any format (e.g., "tenant.auth0.com", + "https://tenant.auth0.com/", "TENANT.AUTH0.COM") + + Returns: + Normalized issuer URL (e.g., "https://tenant.auth0.com/") + + """ + domain = domain.strip().lower() + domain = domain.replace('http://', '').replace('https://', '') + domain = domain.rstrip('/') + return f"https://{domain}/" + + async def fetch_oidc_metadata( domain: str, custom_fetch: Optional[Callable[..., Any]] = None -) -> dict[str, Any]: +) -> tuple[dict[str, Any], Optional[int]]: """ Asynchronously fetch the OIDC config from https://{domain}/.well-known/openid-configuration. - Returns a dict with keys like issuer, jwks_uri, authorization_endpoint, etc. - If custom_fetch is provided, we call it instead of httpx. + + Returns: + Tuple of (metadata_dict, max_age_or_none). max_age is parsed from + the Cache-Control response header if present. """ url = f"https://{domain}/.well-known/openid-configuration" if custom_fetch: response = await custom_fetch(url) - return response.json() if hasattr(response, "json") else response + if hasattr(response, "json"): + data = response.json() + max_age = parse_cache_control_max_age(response.headers) if hasattr(response, "headers") else None + return data, max_age + return response, None else: async with httpx.AsyncClient() as client: resp = await client.get(url) resp.raise_for_status() - return resp.json() + max_age = parse_cache_control_max_age(resp.headers) + return resp.json(), max_age async def fetch_jwks( jwks_uri: str, custom_fetch: Optional[Callable[..., Any]] = None -) -> dict[str, Any]: +) -> tuple[dict[str, Any], Optional[int]]: """ Asynchronously fetch the JSON Web Key Set from jwks_uri. - Returns the raw JWKS JSON, e.g. {'keys': [...]} - If custom_fetch is provided, it must be an async callable - that fetches data from the jwks_uri. + Returns: + Tuple of (jwks_dict, max_age_or_none). max_age is parsed from + the Cache-Control response header if present. 
""" if custom_fetch: response = await custom_fetch(jwks_uri) - return response.json() if hasattr(response, "json") else response + if hasattr(response, "json"): + data = response.json() + max_age = parse_cache_control_max_age(response.headers) if hasattr(response, "headers") else None + return data, max_age + return response, None else: async with httpx.AsyncClient() as client: resp = await client.get(jwks_uri) resp.raise_for_status() - return resp.json() + max_age = parse_cache_control_max_age(resp.headers) + return resp.json(), max_age -def get_unverified_header(token: Union[str, bytes]) -> dict: +def _decode_jwt_segment(token: Union[str, bytes], segment_index: int) -> dict: """ - Parse the first segment (header) of a JWT without verifying signature. - Ensures correct Base64 padding before decode to avoid garbage bytes. + Decode a specific segment from a JWT without verifying signature. + + Args: + token: The JWT token (string or bytes) + segment_index: 0 for header, 1 for payload + + Returns: + Decoded segment as dictionary + + Raises: + ValueError: If token format is invalid """ if isinstance(token, bytes): token = token.decode("utf-8") @@ -66,12 +132,38 @@ def get_unverified_header(token: Union[str, bytes]) -> dict: if len(parts) != 3: raise ValueError(f"Invalid token format: expected 3 segments, got {len(parts)}") - header_b64 = parts[0] - header_b64 = remove_bytes_prefix(header_b64) - header_b64 = fix_base64_padding(header_b64) + segment_b64 = parts[segment_index] + segment_b64 = remove_bytes_prefix(segment_b64) + segment_b64 = fix_base64_padding(segment_b64) + + segment_data = base64.urlsafe_b64decode(segment_b64) + return json.loads(segment_data) + + +def get_unverified_header(token: Union[str, bytes]) -> dict: + """ + Parse the JWT header without verifying signature. 
+ + Args: + token: The JWT token + + Returns: + Decoded header as dictionary + """ + return _decode_jwt_segment(token, 0) + - header_data = base64.urlsafe_b64decode(header_b64) - return json.loads(header_data) +def get_unverified_payload(token: Union[str, bytes]) -> dict: + """ + Parse the JWT payload without verifying signature. + + Args: + token: The JWT token + + Returns: + Decoded payload (claims) as dictionary + """ + return _decode_jwt_segment(token, 1) diff --git a/tests/test_api_client.py b/tests/test_api_client.py index 71c2cc7..bdbfb01 100644 --- a/tests/test_api_client.py +++ b/tests/test_api_client.py @@ -1,6 +1,7 @@ import base64 import json import time +from datetime import datetime import httpx import pytest @@ -51,9 +52,13 @@ async def test_init_missing_args(): """ Test that providing no audience or domain raises an error. """ - with pytest.raises(MissingRequiredArgumentError): + from auth0_api_python.errors import ConfigurationError + + # Empty domain now raises ConfigurationError (not MissingRequiredArgumentError) + with pytest.raises(ConfigurationError): _ = ApiClient(ApiClientOptions(domain="", audience="some_audience")) + # Empty audience still raises MissingRequiredArgumentError with pytest.raises(MissingRequiredArgumentError): _ = ApiClient(ApiClientOptions(domain="example.us.auth0.com", audience="")) @@ -107,37 +112,11 @@ async def test_verify_access_token_successfully(httpx_mock: HTTPXMock): assert claims["sub"] == "user_123" @pytest.mark.asyncio -async def test_verify_access_token_fail_no_iss(httpx_mock: HTTPXMock): +async def test_verify_access_token_fail_no_iss(): """ - Test that a token missing 'iss' claim fails verification. + Test that a token missing 'iss' claim fails verification during pre-validation. + No HTTP mocks needed since pre-validation rejects before network calls. 
""" - - httpx_mock.add_response( - method="GET", - url=DISCOVERY_URL, - json={ - "issuer": "https://auth0.local/", - "jwks_uri": JWKS_URL - } - ) - httpx_mock.add_response( - method="GET", - url=JWKS_URL, - json={ - "keys": [ - { - "kty": "RSA", - "kid": "TEST_KEY", - "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", - "e": "AQAB", - "alg": "RS256", - "use": "sig" - } - ] - } - ) - - access_token = await generate_token( domain="auth0.local", user_id="user_123", @@ -147,21 +126,21 @@ async def test_verify_access_token_fail_no_iss(httpx_mock: HTTPXMock): exp=True ) - api_client = ApiClient( ApiClientOptions(domain="auth0.local", audience="my-audience") ) - with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(access_token=access_token) - assert "issuer mismatch" in str(err.value).lower() or "invalid iss" in str(err.value).lower() + assert "missing 'iss' claim" in str(err.value).lower() @pytest.mark.asyncio async def test_verify_access_token_fail_invalid_iss(httpx_mock: HTTPXMock): """ Test that a token with an invalid issuer fails verification. + In single-domain mode, discovery is fetched from configured domain. + First issuer validation fails before JWKS fetch. 
""" httpx_mock.add_response( method="GET", @@ -171,22 +150,6 @@ async def test_verify_access_token_fail_invalid_iss(httpx_mock: HTTPXMock): "jwks_uri": JWKS_URL } ) - httpx_mock.add_response( - method="GET", - url=JWKS_URL, - json={ - "keys": [ - { - "kty": "RSA", - "kid": "TEST_KEY", - "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", - "e": "AQAB", - "alg": "RS256", - "use": "sig" - } - ] - } - ) access_token = await generate_token( domain="auth0.local", @@ -204,7 +167,8 @@ async def test_verify_access_token_fail_invalid_iss(httpx_mock: HTTPXMock): with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(access_token=access_token) - assert "issuer mismatch" in str(err.value).lower() or "invalid iss" in str(err.value).lower() + # Should fail at first issuer validation + assert "does not match" in str(err.value).lower() @pytest.mark.asyncio async def test_verify_access_token_fail_no_aud(httpx_mock: HTTPXMock): @@ -2815,3 +2779,1024 @@ async def test_get_token_by_exchange_profile_custom_timeout_honored(httpx_mock: assert err.value.status_code == 504 +# ===== MCD (Multi-Custom Domain) Tests ===== + +@pytest.mark.asyncio +async def test_mcd_init_missing_domain_and_domains(): + """Test that providing neither domain nor domains raises ConfigurationError.""" + from auth0_api_python.errors import ConfigurationError + + with pytest.raises(ConfigurationError) as err: + _ = ApiClient(ApiClientOptions(audience="my-audience")) + + assert "Must provide either 'domain' or 'domains'" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_init_with_domain_only(): + """Test that single domain mode works (backward compatible).""" + api_client = 
ApiClient(ApiClientOptions( + domain="auth0.local", + audience="my-audience" + )) + + assert api_client.options.domain == "auth0.local" + assert api_client.options.domains is None + assert api_client._allowed_domains is None + + +@pytest.mark.asyncio +async def test_mcd_init_with_domains_list(): + """Test that domains list is accepted and normalized.""" + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com", "TENANT2.AUTH0.COM", "https://tenant3.auth0.com/"], + audience="my-audience" + )) + + assert api_client.options.domains is not None + assert api_client._allowed_domains == [ + "https://tenant1.auth0.com/", + "https://tenant2.auth0.com/", + "https://tenant3.auth0.com/" + ] + + +@pytest.mark.asyncio +async def test_mcd_init_with_both_domain_and_domains(): + """Test that hybrid mode (both domain and domains) is valid.""" + api_client = ApiClient(ApiClientOptions( + domain="auth0.local", + domains=["tenant1.auth0.com", "tenant2.auth0.com"], + audience="my-audience" + )) + + # Both should be stored + assert api_client.options.domain == "auth0.local" + assert api_client.options.domains is not None + assert api_client._allowed_domains == [ + "https://tenant1.auth0.com/", + "https://tenant2.auth0.com/" + ] + + +@pytest.mark.asyncio +async def test_mcd_init_with_empty_domains_list(): + """Test that empty domains list raises ConfigurationError.""" + from auth0_api_python.errors import ConfigurationError + + with pytest.raises(ConfigurationError) as err: + _ = ApiClient(ApiClientOptions( + domains=[], + audience="my-audience" + )) + + assert "domains list cannot be empty" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_init_with_domains_resolver(): + """Test that resolver function is accepted.""" + def my_resolver(context: dict) -> list[str]: + return ["tenant1.auth0.com", "tenant2.auth0.com"] + + api_client = ApiClient(ApiClientOptions( + domains=my_resolver, + audience="my-audience" + )) + + assert callable(api_client._allowed_domains) 
+ assert api_client._allowed_domains == my_resolver + + +@pytest.mark.asyncio +async def test_mcd_init_with_invalid_domains_type(): + """Test that invalid domains type raises ConfigurationError.""" + from auth0_api_python.errors import ConfigurationError + + with pytest.raises(ConfigurationError) as err: + _ = ApiClient(ApiClientOptions( + domains="invalid-string", # Should be list or callable + audience="my-audience" + )) + + assert "must be either a list" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_resolve_allowed_domains_static_list(): + """Test _resolve_allowed_domains with static list.""" + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com", "tenant2.auth0.com"], + audience="my-audience" + )) + + # Valid issuer + result = await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") + assert result == ["https://tenant1.auth0.com/", "https://tenant2.auth0.com/"] + + # Invalid issuer + with pytest.raises(VerifyAccessTokenError) as err: + await api_client._resolve_allowed_domains("https://unknown.auth0.com/") + + assert "not in the list of allowed domains" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_resolve_allowed_domains_with_resolver(): + """Test _resolve_allowed_domains with dynamic resolver.""" + def my_resolver(context: dict) -> list[str]: + # Return different domains based on issuer + if "tenant1" in context['unverified_iss']: + return ["tenant1.auth0.com"] + return ["tenant2.auth0.com", "tenant3.auth0.com"] + + api_client = ApiClient(ApiClientOptions( + domains=my_resolver, + audience="my-audience" + )) + + # Resolver allows tenant1 + result = await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") + assert result == ["https://tenant1.auth0.com/"] + + # Resolver allows tenant2 + result = await api_client._resolve_allowed_domains("https://tenant2.auth0.com/") + assert result == ["https://tenant2.auth0.com/", "https://tenant3.auth0.com/"] + + +@pytest.mark.asyncio +async 
def test_mcd_resolve_allowed_domains_resolver_rejects(): + """Test that resolver can reject issuers by not including them.""" + def my_resolver(context: dict) -> list[str]: + return ["tenant1.auth0.com"] # Only allows tenant1 + + api_client = ApiClient(ApiClientOptions( + domains=my_resolver, + audience="my-audience" + )) + + # Resolver rejects tenant2 + with pytest.raises(VerifyAccessTokenError) as err: + await api_client._resolve_allowed_domains("https://tenant2.auth0.com/") + + assert "not in the list of allowed domains" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_resolve_allowed_domains_resolver_error(): + """Test that resolver errors are wrapped in DomainsResolverError.""" + from auth0_api_python.errors import DomainsResolverError + + def failing_resolver(context: dict) -> list[str]: + raise ValueError("Database connection failed") + + api_client = ApiClient(ApiClientOptions( + domains=failing_resolver, + audience="my-audience" + )) + + with pytest.raises(DomainsResolverError) as err: + await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") + + assert "Domains resolver function failed" in str(err.value) + assert "Database connection failed" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_resolve_allowed_domains_resolver_invalid_return_type(): + """Test that resolver must return a list.""" + from auth0_api_python.errors import DomainsResolverError + + def bad_resolver(context: dict) -> str: + return "tenant1.auth0.com" # Should return list, not string + + api_client = ApiClient(ApiClientOptions( + domains=bad_resolver, + audience="my-audience" + )) + + with pytest.raises(DomainsResolverError) as err: + await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") + + assert "must return a list" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_resolve_allowed_domains_resolver_empty_list(): + """Test that resolver cannot return empty list.""" + from auth0_api_python.errors import 
DomainsResolverError + + def empty_resolver(context: dict) -> list[str]: + return [] + + api_client = ApiClient(ApiClientOptions( + domains=empty_resolver, + audience="my-audience" + )) + + with pytest.raises(DomainsResolverError) as err: + await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") + + assert "returned an empty list" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_resolve_allowed_domains_resolver_receives_context(): + """Test that resolver receives correct context with unverified_iss, request_url, and request_headers.""" + received_context = {} + + def context_capture_resolver(context: dict) -> list[str]: + received_context.update(context) + return ["tenant1.auth0.com"] + + api_client = ApiClient(ApiClientOptions( + domains=context_capture_resolver, + audience="my-audience" + )) + + # Call with request_url and request_headers + mock_url = "https://api.example.com/protected" + mock_headers = {"host": "api.example.com", "user-agent": "test"} + await api_client._resolve_allowed_domains( + "https://tenant1.auth0.com/", + request_url=mock_url, + request_headers=mock_headers + ) + + # Verify context was passed correctly (matching TypeScript SDK structure) + assert received_context['unverified_iss'] == "https://tenant1.auth0.com/" + assert received_context['request_url'] == mock_url + assert received_context['request_headers'] == mock_headers + + +@pytest.mark.asyncio +async def test_mcd_resolve_allowed_domains_single_domain_mode(): + """Test that single domain mode returns None from _resolve_allowed_domains.""" + api_client = ApiClient(ApiClientOptions( + domain="auth0.local", + audience="my-audience" + )) + + # Single domain mode should return None + result = await api_client._resolve_allowed_domains("https://auth0.local/") + assert result is None + + +@pytest.mark.asyncio +async def test_mcd_verify_rejects_symmetric_algorithm(): + """Test that verify_access_token rejects tokens with symmetric algorithms (HS256).""" + import 
base64 + import json + + # Create a token with HS256 algorithm in header (without actually signing it) + header = {"alg": "HS256", "typ": "JWT", "kid": "test-key"} + payload = { + "iss": "https://tenant1.auth0.com/", + "aud": "my-audience", + "sub": "user123", + "exp": 9999999999, + "iat": 1000000000 + } + + # Encode header and payload (signature doesn't matter for this test) + header_b64 = base64.urlsafe_b64encode(json.dumps(header).encode()).decode().rstrip('=') + payload_b64 = base64.urlsafe_b64encode(json.dumps(payload).encode()).decode().rstrip('=') + fake_signature = "fake_signature" + hs256_token = f"{header_b64}.{payload_b64}.{fake_signature}" + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # Should reject immediately without network calls + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_access_token(hs256_token) + + assert "Symmetric algorithm 'HS256' is not supported" in str(err.value) + + +@pytest.mark.asyncio +async def test_mcd_verify_early_issuer_validation(httpx_mock): + """Test that issuer validation happens before JWKS fetch (performance optimization).""" + # Generate token from disallowed issuer + token = await generate_token( + domain="disallowed-tenant.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://disallowed-tenant.auth0.com/" + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com", "tenant2.auth0.com"], + audience="my-audience" + )) + + # Mock should NOT be called because issuer validation happens first + # If JWKS is fetched, test will fail because no mock is registered + + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_access_token(token) + + # Verify the error is about issuer not being allowed + assert "not in the list of allowed domains" in str(err.value) + + # Verify no HTTP calls were made (JWKS fetch was skipped) + assert len(httpx_mock.get_requests()) == 0 + + 
+@pytest.mark.asyncio +async def test_mcd_discovery_uses_token_issuer(httpx_mock): + """Test that discovery is fetched from token's issuer, not configured domain.""" + # Generate token from tenant1 + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery for tenant1 (token's issuer) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS for tenant1 + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + # Create client with multiple domains + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com", "tenant2.auth0.com"], + audience="my-audience" + )) + + # Verify token - should use tenant1's discovery + claims = await api_client.verify_access_token(token) + + assert claims["sub"] == "user123" + + # Verify the correct discovery URL was called + requests = httpx_mock.get_requests() + discovery_requests = [r for r in requests if 'openid-configuration' in str(r.url)] + assert len(discovery_requests) == 1 + assert str(discovery_requests[0].url) == "https://tenant1.auth0.com/.well-known/openid-configuration" + + +@pytest.mark.asyncio +async def test_mcd_first_issuer_validation(httpx_mock): + """Test first issuer validation: token iss 
must match discovery issuer.""" + # Generate token with tenant1 issuer + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery that returns DIFFERENT issuer (attack scenario) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://malicious.auth0.com/", # Mismatch! + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # Should fail at first issuer validation (before JWKS fetch) + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_access_token(token) + + assert "token issuer does not match the discovery issuer" in str(err.value).lower() + + +@pytest.mark.asyncio +async def test_mcd_discovery_missing_issuer_field(httpx_mock): + """Test that missing issuer field in discovery causes clear error.""" + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery WITHOUT issuer field + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + # Missing "issuer" field + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_access_token(token) + + assert "missing 'issuer' field" in str(err.value).lower() + + +@pytest.mark.asyncio +async def test_mcd_discovery_missing_jwks_uri_field(httpx_mock): + """Test that missing jwks_uri field in discovery causes clear error.""" + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + 
audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery WITHOUT jwks_uri field + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/" + # Missing "jwks_uri" field + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_access_token(token) + + assert "missing 'jwks_uri' field" in str(err.value).lower() + + +@pytest.mark.asyncio +async def test_mcd_jwks_fetched_from_issuer_jwks_uri(httpx_mock): + """Test that JWKS is fetched from issuer-specific jwks_uri in discovery metadata.""" + # Generate token from tenant1 + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery for tenant1 with specific jwks_uri + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" # Tenant-specific + } + ) + + # Mock JWKS for tenant1 + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com", "tenant2.auth0.com"], + audience="my-audience" + )) + + # 
Verify token - should fetch JWKS from tenant1's specific URI + claims = await api_client.verify_access_token(token) + + assert claims["sub"] == "user123" + + # Verify JWKS was fetched from tenant1's specific URI + requests = httpx_mock.get_requests() + jwks_requests = [r for r in requests if 'jwks.json' in str(r.url)] + assert len(jwks_requests) == 1 + assert str(jwks_requests[0].url) == "https://tenant1.auth0.com/.well-known/jwks.json" + + +@pytest.mark.asyncio +async def test_mcd_signature_verification_with_correct_key(httpx_mock): + """Test that signature verification uses the correct public key based on token's kid.""" + # Generate token + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS with multiple keys - correct key is TEST_KEY + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "OTHER_KEY", # Different key + "n": "different_modulus_value", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + }, + { + "kty": "RSA", + "kid": "TEST_KEY", # Correct key (matches token) + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # Should 
successfully verify using TEST_KEY (not OTHER_KEY) + claims = await api_client.verify_access_token(token) + + assert claims["sub"] == "user123" + + +@pytest.mark.asyncio +async def test_mcd_jwks_no_matching_kid(httpx_mock): + """Test clear error when token's kid not found in JWKS.""" + # Generate token (will have kid="TEST_KEY") + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS with keys that DON'T match token's kid + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "DIFFERENT_KEY", # Doesn't match token's kid + "n": "some_modulus", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + }, + { + "kty": "RSA", + "kid": "ANOTHER_KEY", # Also doesn't match + "n": "another_modulus", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # Should fail with clear error about missing kid + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_access_token(token) + + assert "no matching key found in jwks" in str(err.value).lower() + + +@pytest.mark.asyncio +async def test_mcd_discovery_cached_per_issuer(httpx_mock): + """Test that discovery metadata is cached per issuer.""" + # Generate tokens from two different issuers + token1 = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + token2 = await generate_token( + domain="tenant2.auth0.com", + user_id="user456", + 
audience="my-audience", + issuer="https://tenant2.auth0.com/" + ) + + # Mock discovery for tenant1 + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS for tenant1 + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + # Mock discovery for tenant2 + httpx_mock.add_response( + method="GET", + url="https://tenant2.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant2.auth0.com/", + "jwks_uri": "https://tenant2.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS for tenant2 + httpx_mock.add_response( + method="GET", + url="https://tenant2.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com", "tenant2.auth0.com"], + audience="my-audience" + )) + + # Verify both tokens - should cache separately + 
claims1 = await api_client.verify_access_token(token1) + claims2 = await api_client.verify_access_token(token2) + + assert claims1["sub"] == "user123" + assert claims2["sub"] == "user456" + + # Verify both discovery endpoints were called + requests = httpx_mock.get_requests() + discovery_requests = [r for r in requests if 'openid-configuration' in str(r.url)] + assert len(discovery_requests) == 2 + + +@pytest.mark.asyncio +async def test_mcd_discovery_cache_hit(httpx_mock): + """Test that second request for same issuer uses cached discovery.""" + # Generate two tokens from same issuer + token1 = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + token2 = await generate_token( + domain="tenant1.auth0.com", + user_id="user456", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery for tenant1 (only once) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS for tenant1 (only once) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # Verify first token - fetches from network + claims1 = await 
api_client.verify_access_token(token1) + assert claims1["sub"] == "user123" + + # Verify second token - should use cache (no additional HTTP calls) + claims2 = await api_client.verify_access_token(token2) + assert claims2["sub"] == "user456" + + # Verify discovery was only called once + requests = httpx_mock.get_requests() + discovery_requests = [r for r in requests if 'openid-configuration' in str(r.url)] + assert len(discovery_requests) == 1 + + # Verify JWKS was only called once + jwks_requests = [r for r in requests if 'jwks.json' in str(r.url)] + assert len(jwks_requests) == 1 + + +@pytest.mark.asyncio +async def test_mcd_jwks_cached_per_uri(httpx_mock): + """Test that JWKS is cached per URI.""" + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # Verify token + claims = await api_client.verify_access_token(token) + assert claims["sub"] == "user123" + + # Verify cache key is based on jwks_uri + cache_key = 
"https://tenant1.auth0.com/.well-known/jwks.json" + cached_jwks = api_client._jwks_cache.get(cache_key) + assert cached_jwks is not None + assert "keys" in cached_jwks + + +@pytest.mark.asyncio +async def test_mcd_jwks_cache_hit(httpx_mock): + """Test that multiple tokens use cached JWKS.""" + token1 = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + token2 = await generate_token( + domain="tenant1.auth0.com", + user_id="user456", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery (once) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS (once) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # Verify both tokens + claims1 = await api_client.verify_access_token(token1) + claims2 = await api_client.verify_access_token(token2) + + assert claims1["sub"] == "user123" + assert claims2["sub"] == "user456" + + # Verify JWKS was only fetched once + requests = httpx_mock.get_requests() + jwks_requests = [r for r in requests if 'jwks.json' in str(r.url)] + assert len(jwks_requests) == 1 + + +@pytest.mark.asyncio +async 
def test_mcd_cache_max_entries_configuration(httpx_mock): + """Test that cache_max_entries configuration is respected.""" + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com", "tenant2.auth0.com"], + audience="my-audience", + cache_max_entries=2 + )) + + # Verify both caches have correct max_entries + assert api_client._discovery_cache._max_entries == 2 + assert api_client._jwks_cache._max_entries == 2 + + # Test with default value + api_client_default = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + assert api_client_default._discovery_cache._max_entries == 100 + assert api_client_default._jwks_cache._max_entries == 100 + + +@pytest.mark.asyncio +@pytest.mark.parametrize("max_age_header,configured_ttl,expected_ttl", [ + ("max-age=60", 600, 60), + ("max-age=3600", 600, 600), + (None, 600, 600), +]) +async def test_effective_ttl_from_cache_control(httpx_mock, max_age_header, configured_ttl, expected_ttl): + """Test that effective TTL respects min(max_age, configured_ttl).""" + + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + discovery_headers = {"cache-control": max_age_header} if max_age_header else {} + jwks_headers = {"cache-control": max_age_header} if max_age_header else {} + + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + }, + headers=discovery_headers, + ) + + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": 
"whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + }, + headers=jwks_headers, + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience", + cache_ttl_seconds=configured_ttl, + )) + + before = datetime.now() + await api_client.verify_access_token(token) + + # Inspect discovery cache entry expiry + discovery_key = "https://tenant1.auth0.com/" + _, discovery_expiry = api_client._discovery_cache._cache[discovery_key] + discovery_ttl = (discovery_expiry - before).total_seconds() + assert abs(discovery_ttl - expected_ttl) < 2 + + # Inspect JWKS cache entry expiry + jwks_key = "https://tenant1.auth0.com/.well-known/jwks.json" + _, jwks_expiry = api_client._jwks_cache._cache[jwks_key] + jwks_ttl = (jwks_expiry - before).total_seconds() + assert abs(jwks_ttl - expected_ttl) < 2 + + diff --git a/tests/test_cache.py b/tests/test_cache.py new file mode 100644 index 0000000..eb001e0 --- /dev/null +++ b/tests/test_cache.py @@ -0,0 +1,163 @@ +import time + +from auth0_api_python.cache import InMemoryCache + + +# ===== InMemoryCache Basic Operations ===== + + +def test_in_memory_cache_get_set(): + """Test basic get and set operations.""" + cache = InMemoryCache() + + cache.set("key1", {"data": "value1"}) + cache.set("key2", "string_value") + cache.set("key3", 123) + + assert cache.get("key1") == {"data": "value1"} + assert cache.get("key2") == "string_value" + assert cache.get("key3") == 123 + + +def test_in_memory_cache_ttl_expiry(): + """Test that entries expire after TTL.""" + cache = InMemoryCache() + + cache.set("key1", "value1", ttl_seconds=1) + + assert cache.get("key1") == 
"value1" + + time.sleep(1.1) + + assert cache.get("key1") is None + + +def test_in_memory_cache_delete(): + """Test delete operation.""" + cache = InMemoryCache() + + cache.set("key1", "value1") + assert cache.get("key1") == "value1" + + cache.delete("key1") + assert cache.get("key1") is None + + cache.delete("nonexistent") + + +def test_in_memory_cache_clear(): + """Test clear all operation.""" + cache = InMemoryCache() + + cache.set("key1", "value1") + cache.set("key2", "value2") + cache.set("key3", "value3") + + assert cache.get("key1") == "value1" + assert cache.get("key2") == "value2" + + cache.clear() + + assert cache.get("key1") is None + assert cache.get("key2") is None + assert cache.get("key3") is None + + +def test_in_memory_cache_get_expired(): + """Test that expired entries return None and are removed.""" + cache = InMemoryCache() + + cache.set("key1", "value1", ttl_seconds=1) + time.sleep(1.1) + + result = cache.get("key1") + assert result is None + + result_again = cache.get("key1") + assert result_again is None + + +def test_in_memory_cache_get_nonexistent(): + """Test that nonexistent keys return None.""" + cache = InMemoryCache() + + assert cache.get("nonexistent") is None + assert cache.get("another_missing_key") is None + + +def test_in_memory_cache_overwrite(): + """Test overwriting existing keys.""" + cache = InMemoryCache() + + cache.set("key1", "original_value") + assert cache.get("key1") == "original_value" + + cache.set("key1", "new_value") + assert cache.get("key1") == "new_value" + + cache.set("key1", {"complex": "value"}, ttl_seconds=10) + assert cache.get("key1") == {"complex": "value"} + + +def test_in_memory_cache_no_ttl(): + """Test that entries without TTL never expire.""" + cache = InMemoryCache() + + cache.set("key1", "value1") + + time.sleep(0.5) + assert cache.get("key1") == "value1" + + time.sleep(0.5) + assert cache.get("key1") == "value1" + + +# ===== LRU Eviction ===== + + +def test_in_memory_cache_max_entries_eviction(): + 
"""Test LRU eviction when max_entries is reached.""" + cache = InMemoryCache(max_entries=3) + + cache.set("key1", "value1") + cache.set("key2", "value2") + cache.set("key3", "value3") + + assert cache.get("key1") == "value1" + assert cache.get("key2") == "value2" + assert cache.get("key3") == "value3" + + cache.set("key4", "value4") + + assert cache.get("key1") is None + assert cache.get("key2") == "value2" + assert cache.get("key3") == "value3" + assert cache.get("key4") == "value4" + + +def test_in_memory_cache_lru_access_order(): + """Test that least recently used entry is evicted first.""" + cache = InMemoryCache(max_entries=3) + + cache.set("key1", "value1") + cache.set("key2", "value2") + cache.set("key3", "value3") + + cache.get("key1") + + time.sleep(0.01) + + cache.get("key2") + + time.sleep(0.01) + + cache.set("key4", "value4") + + assert cache.get("key3") is None + assert cache.get("key1") == "value1" + assert cache.get("key2") == "value2" + assert cache.get("key4") == "value4" + + + + diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..ed86161 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,104 @@ +""" +Tests for utility functions in auth0_api_python.utils +""" + +import asyncio + +import pytest +from auth0_api_python.utils import normalize_domain, get_unverified_payload, parse_cache_control_max_age +from auth0_api_python.token_utils import generate_token + + +# ===== normalize_domain ===== + + +def test_normalize_domain_bare(): + """Test normalization of bare domain.""" + assert normalize_domain("tenant.auth0.com") == "https://tenant.auth0.com/" + + +def test_normalize_domain_with_https(): + """Test normalization of domain with https:// prefix.""" + assert normalize_domain("https://tenant.auth0.com") == "https://tenant.auth0.com/" + + +def test_normalize_domain_with_http(): + """Test normalization of domain with http:// prefix (converts to https).""" + assert normalize_domain("http://tenant.auth0.com") == 
"https://tenant.auth0.com/" + + +def test_normalize_domain_with_trailing_slash(): + """Test normalization of domain with trailing slash.""" + assert normalize_domain("tenant.auth0.com/") == "https://tenant.auth0.com/" + + +def test_normalize_domain_with_https_and_trailing_slash(): + """Test normalization of fully formatted issuer URL.""" + assert normalize_domain("https://tenant.auth0.com/") == "https://tenant.auth0.com/" + + +def test_normalize_domain_mixed_case(): + """Test normalization converts to lowercase.""" + assert normalize_domain("TENANT.AUTH0.COM") == "https://tenant.auth0.com/" + + +def test_normalize_domain_mixed_case_with_protocol(): + """Test normalization with mixed case protocol and domain.""" + assert normalize_domain("HTTPS://Tenant.Auth0.COM") == "https://tenant.auth0.com/" + + +def test_normalize_domain_with_whitespace(): + """Test normalization strips leading and trailing whitespace.""" + assert normalize_domain(" tenant.auth0.com ") == "https://tenant.auth0.com/" + + +def test_normalize_domain_custom_domain(): + """Test normalization with custom domain.""" + assert normalize_domain("auth.example.com") == "https://auth.example.com/" + + +def test_normalize_domain_multiple_slashes(): + """Test normalization with multiple trailing slashes.""" + assert normalize_domain("tenant.auth0.com///") == "https://tenant.auth0.com/" + + +# ===== get_unverified_payload ===== + + +def test_get_unverified_payload_valid_token(): + """Test extracting payload from a valid token.""" + token = asyncio.run(generate_token( + domain="tenant.auth0.com", + user_id="user123", + audience="my-api", + issuer="https://tenant.auth0.com/" + )) + + payload = get_unverified_payload(token) + + assert payload["iss"] == "https://tenant.auth0.com/" + assert payload["aud"] == "my-api" + assert payload["sub"] == "user123" + assert "exp" in payload + assert "iat" in payload + + +def test_get_unverified_payload_invalid_token(): + """Test that malformed token raises ValueError.""" + 
invalid_token = "not.a.valid.jwt.token" + + with pytest.raises(ValueError, match="Invalid token format"): + get_unverified_payload(invalid_token) + + +# ===== parse_cache_control_max_age ===== + + +@pytest.mark.parametrize("headers,expected", [ + ({"cache-control": "max-age=300"}, 300), + ({"cache-control": "public, max-age=600, must-revalidate"}, 600), + ({}, None), +]) +def test_parse_cache_control_max_age(headers, expected): + """Test parsing max-age from Cache-Control headers.""" + assert parse_cache_control_max_age(headers) == expected From 479cde77b5e51c7068f55489f5e37a69afee1075 Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Mon, 23 Feb 2026 18:39:13 +0530 Subject: [PATCH 2/8] fix: update dependencies and clean up code formatting --- .github/workflows/test.yml | 2 +- src/auth0_api_python/api_client.py | 34 +++++++++++++++--------------- src/auth0_api_python/cache.py | 2 +- src/auth0_api_python/config.py | 2 +- src/auth0_api_python/types.py | 6 +++--- src/auth0_api_python/utils.py | 23 ++++++++++---------- 6 files changed, 35 insertions(+), 34 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6e6ce27..8db71be 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -36,7 +36,7 @@ jobs: - name: Install Poetry uses: snok/install-poetry@v1 with: - version: latest + version: 2.2.1 virtualenvs-create: true virtualenvs-in-project: true installer-parallel: true diff --git a/src/auth0_api_python/api_client.py b/src/auth0_api_python/api_client.py index 8b22974..a16b53a 100644 --- a/src/auth0_api_python/api_client.py +++ b/src/auth0_api_python/api_client.py @@ -56,11 +56,11 @@ def __init__(self, options: ApiClientOptions): # Validate audience is always required if not options.audience: raise MissingRequiredArgumentError("audience") - + # Validate domains parameter if provided if options.domains is not None: if isinstance(options.domains, list): - # Static list validation + # Static list validation if 
len(options.domains) == 0: raise ConfigurationError("domains list cannot be empty") # Normalize and store domains @@ -75,7 +75,7 @@ def __init__(self, options: ApiClientOptions): else: # Single domain mode self._allowed_domains = None - + # Validate domain/domains configuration if not options.domain and not options.domains: raise ConfigurationError( @@ -111,20 +111,20 @@ async def _resolve_allowed_domains( ) -> list[str]: """ Resolve and validate allowed domains for the given issuer. - + Handles three modes: 1. Static list: Returns normalized list, validates issuer against it 2. Dynamic resolver: Invokes resolver function, validates issuer against result 3. Single domain: Returns None (backward compatibility, uses domain) - + Args: unverified_iss: The issuer claim from the token (not yet verified) request_url: Optional request URL for dynamic resolvers request_headers: Optional request headers for dynamic resolvers - + Returns: List of normalized allowed domain strings - + Raises: DomainsResolverError: If resolver invocation fails VerifyAccessTokenError: If issuer is not in allowed domains @@ -132,7 +132,7 @@ async def _resolve_allowed_domains( # Single domain mode if self._allowed_domains is None: return None - + # Static list mode if isinstance(self._allowed_domains, list): allowed_domains = self._allowed_domains @@ -144,7 +144,7 @@ async def _resolve_allowed_domains( 'request_headers': request_headers, 'unverified_iss': unverified_iss } - + # Invoke resolver try: result = self._allowed_domains(context) @@ -152,30 +152,30 @@ async def _resolve_allowed_domains( raise DomainsResolverError( f"Domains resolver function failed: {str(e)}" ) from e - + # Validate resolver result if not isinstance(result, list): raise DomainsResolverError( "Domains resolver must return a list" ) - + if len(result) == 0: raise DomainsResolverError( "Domains resolver returned an empty list" ) - + # Normalize domains from resolver allowed_domains = [normalize_domain(d) for d in result] 
else: # Should never happen due to __init__ validation raise ConfigurationError("Invalid _allowed_domains type") - + # Validate issuer is in allowed domains if unverified_iss not in allowed_domains: raise VerifyAccessTokenError( "Token issuer is not in the list of allowed domains" ) - + return allowed_domains async def verify_request( @@ -442,15 +442,15 @@ async def verify_access_token( raise VerifyAccessTokenError( f"Failed to fetch OIDC discovery metadata: {str(e)}" ) from e - + # First issuer validation: Prevent issuer confusion attacks discovery_issuer = metadata.get("issuer") if not discovery_issuer: raise VerifyAccessTokenError("Discovery metadata missing 'issuer' field") - + # Normalize discovery issuer for comparison normalized_discovery_issuer = normalize_domain(discovery_issuer) - + if normalized_iss != normalized_discovery_issuer: raise VerifyAccessTokenError( "Token issuer does not match the discovery issuer" diff --git a/src/auth0_api_python/cache.py b/src/auth0_api_python/cache.py index 2787709..4a9b950 100644 --- a/src/auth0_api_python/cache.py +++ b/src/auth0_api_python/cache.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod -from typing import Optional, Any from datetime import datetime, timedelta +from typing import Any, Optional class CacheAdapter(ABC): diff --git a/src/auth0_api_python/config.py b/src/auth0_api_python/config.py index 3a9e15c..6c5f1c7 100644 --- a/src/auth0_api_python/config.py +++ b/src/auth0_api_python/config.py @@ -13,7 +13,7 @@ class ApiClientOptions: Configuration for the ApiClient. Args: - domain: The Auth0 domain for single-domain mode and client flows, + domain: The Auth0 domain for single-domain mode and client flows, e.g., "my-tenant.us.auth0.com". Optional if domains is provided. domains: List of allowed domains or a resolver function for multi-domain mode. 
Can be a static list of domain strings or a callable that returns diff --git a/src/auth0_api_python/types.py b/src/auth0_api_python/types.py index 14ce150..9740f1c 100644 --- a/src/auth0_api_python/types.py +++ b/src/auth0_api_python/types.py @@ -8,7 +8,7 @@ class DomainsResolverContext(TypedDict, total=False): """ Context passed to domains resolver functions. - + Attributes: request_url: The URL the API request was made to (optional) request_headers: Request headers dict (e.g., Host, X-Forwarded-Host) (optional) @@ -37,12 +37,12 @@ class DomainsResolverContext(TypedDict, total=False): Example: from auth0_api_python import DomainsResolverContext - + def my_resolver(context: DomainsResolverContext) -> list[str]: unverified_iss = context['unverified_iss'] request_url = context.get('request_url') request_headers = context.get('request_headers') - + # Fetch allowed domains based on context return ['tenant1.auth0.com', 'tenant2.auth0.com'] """ diff --git a/src/auth0_api_python/utils.py b/src/auth0_api_python/utils.py index c499ed0..7c27176 100644 --- a/src/auth0_api_python/utils.py +++ b/src/auth0_api_python/utils.py @@ -7,7 +7,8 @@ import hashlib import json import re -from typing import Any, Callable, Mapping, Optional, Union +from collections.abc import Mapping +from typing import Any, Callable, Optional, Union import httpx from ada_url import URL @@ -43,11 +44,11 @@ def parse_cache_control_max_age(headers: Mapping[str, str]) -> Optional[int]: def normalize_domain(domain: str) -> str: """ Normalize a domain string to a standard issuer URL format. 
- + Args: - domain: Domain string in any format (e.g., "tenant.auth0.com", + domain: Domain string in any format (e.g., "tenant.auth0.com", "https://tenant.auth0.com/", "TENANT.AUTH0.COM") - + Returns: Normalized issuer URL (e.g., "https://tenant.auth0.com/") @@ -114,14 +115,14 @@ async def fetch_jwks( def _decode_jwt_segment(token: Union[str, bytes], segment_index: int) -> dict: """ Decode a specific segment from a JWT without verifying signature. - + Args: token: The JWT token (string or bytes) segment_index: 0 for header, 1 for payload - + Returns: Decoded segment as dictionary - + Raises: ValueError: If token format is invalid """ @@ -143,10 +144,10 @@ def _decode_jwt_segment(token: Union[str, bytes], segment_index: int) -> dict: def get_unverified_header(token: Union[str, bytes]) -> dict: """ Parse the JWT header without verifying signature. - + Args: token: The JWT token - + Returns: Decoded header as dictionary """ @@ -156,10 +157,10 @@ def get_unverified_header(token: Union[str, bytes]) -> dict: def get_unverified_payload(token: Union[str, bytes]) -> dict: """ Parse the JWT payload without verifying signature. - + Args: token: The JWT token - + Returns: Decoded payload (claims) as dictionary """ From c7673086f747cfeb72857370a5031e54db4a93be Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Mon, 23 Feb 2026 18:47:14 +0530 Subject: [PATCH 3/8] fix: lint issues in test files --- tests/test_api_client.py | 224 +++++++++++++++++++-------------------- tests/test_cache.py | 1 - tests/test_utils.py | 8 +- 3 files changed, 118 insertions(+), 115 deletions(-) diff --git a/tests/test_api_client.py b/tests/test_api_client.py index bdbfb01..24a26f3 100644 --- a/tests/test_api_client.py +++ b/tests/test_api_client.py @@ -53,7 +53,7 @@ async def test_init_missing_args(): Test that providing no audience or domain raises an error. 
""" from auth0_api_python.errors import ConfigurationError - + # Empty domain now raises ConfigurationError (not MissingRequiredArgumentError) with pytest.raises(ConfigurationError): _ = ApiClient(ApiClientOptions(domain="", audience="some_audience")) @@ -2785,10 +2785,10 @@ async def test_get_token_by_exchange_profile_custom_timeout_honored(httpx_mock: async def test_mcd_init_missing_domain_and_domains(): """Test that providing neither domain nor domains raises ConfigurationError.""" from auth0_api_python.errors import ConfigurationError - + with pytest.raises(ConfigurationError) as err: _ = ApiClient(ApiClientOptions(audience="my-audience")) - + assert "Must provide either 'domain' or 'domains'" in str(err.value) @@ -2799,7 +2799,7 @@ async def test_mcd_init_with_domain_only(): domain="auth0.local", audience="my-audience" )) - + assert api_client.options.domain == "auth0.local" assert api_client.options.domains is None assert api_client._allowed_domains is None @@ -2812,7 +2812,7 @@ async def test_mcd_init_with_domains_list(): domains=["tenant1.auth0.com", "TENANT2.AUTH0.COM", "https://tenant3.auth0.com/"], audience="my-audience" )) - + assert api_client.options.domains is not None assert api_client._allowed_domains == [ "https://tenant1.auth0.com/", @@ -2829,7 +2829,7 @@ async def test_mcd_init_with_both_domain_and_domains(): domains=["tenant1.auth0.com", "tenant2.auth0.com"], audience="my-audience" )) - + # Both should be stored assert api_client.options.domain == "auth0.local" assert api_client.options.domains is not None @@ -2843,13 +2843,13 @@ async def test_mcd_init_with_both_domain_and_domains(): async def test_mcd_init_with_empty_domains_list(): """Test that empty domains list raises ConfigurationError.""" from auth0_api_python.errors import ConfigurationError - + with pytest.raises(ConfigurationError) as err: _ = ApiClient(ApiClientOptions( domains=[], audience="my-audience" )) - + assert "domains list cannot be empty" in str(err.value) @@ -2858,12 
+2858,12 @@ async def test_mcd_init_with_domains_resolver(): """Test that resolver function is accepted.""" def my_resolver(context: dict) -> list[str]: return ["tenant1.auth0.com", "tenant2.auth0.com"] - + api_client = ApiClient(ApiClientOptions( domains=my_resolver, audience="my-audience" )) - + assert callable(api_client._allowed_domains) assert api_client._allowed_domains == my_resolver @@ -2872,13 +2872,13 @@ def my_resolver(context: dict) -> list[str]: async def test_mcd_init_with_invalid_domains_type(): """Test that invalid domains type raises ConfigurationError.""" from auth0_api_python.errors import ConfigurationError - + with pytest.raises(ConfigurationError) as err: _ = ApiClient(ApiClientOptions( domains="invalid-string", # Should be list or callable audience="my-audience" )) - + assert "must be either a list" in str(err.value) @@ -2889,15 +2889,15 @@ async def test_mcd_resolve_allowed_domains_static_list(): domains=["tenant1.auth0.com", "tenant2.auth0.com"], audience="my-audience" )) - + # Valid issuer result = await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") assert result == ["https://tenant1.auth0.com/", "https://tenant2.auth0.com/"] - + # Invalid issuer with pytest.raises(VerifyAccessTokenError) as err: await api_client._resolve_allowed_domains("https://unknown.auth0.com/") - + assert "not in the list of allowed domains" in str(err.value) @@ -2909,16 +2909,16 @@ def my_resolver(context: dict) -> list[str]: if "tenant1" in context['unverified_iss']: return ["tenant1.auth0.com"] return ["tenant2.auth0.com", "tenant3.auth0.com"] - + api_client = ApiClient(ApiClientOptions( domains=my_resolver, audience="my-audience" )) - + # Resolver allows tenant1 result = await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") assert result == ["https://tenant1.auth0.com/"] - + # Resolver allows tenant2 result = await api_client._resolve_allowed_domains("https://tenant2.auth0.com/") assert result == 
["https://tenant2.auth0.com/", "https://tenant3.auth0.com/"] @@ -2929,16 +2929,16 @@ async def test_mcd_resolve_allowed_domains_resolver_rejects(): """Test that resolver can reject issuers by not including them.""" def my_resolver(context: dict) -> list[str]: return ["tenant1.auth0.com"] # Only allows tenant1 - + api_client = ApiClient(ApiClientOptions( domains=my_resolver, audience="my-audience" )) - + # Resolver rejects tenant2 with pytest.raises(VerifyAccessTokenError) as err: await api_client._resolve_allowed_domains("https://tenant2.auth0.com/") - + assert "not in the list of allowed domains" in str(err.value) @@ -2946,18 +2946,18 @@ def my_resolver(context: dict) -> list[str]: async def test_mcd_resolve_allowed_domains_resolver_error(): """Test that resolver errors are wrapped in DomainsResolverError.""" from auth0_api_python.errors import DomainsResolverError - + def failing_resolver(context: dict) -> list[str]: raise ValueError("Database connection failed") - + api_client = ApiClient(ApiClientOptions( domains=failing_resolver, audience="my-audience" )) - + with pytest.raises(DomainsResolverError) as err: await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") - + assert "Domains resolver function failed" in str(err.value) assert "Database connection failed" in str(err.value) @@ -2966,18 +2966,18 @@ def failing_resolver(context: dict) -> list[str]: async def test_mcd_resolve_allowed_domains_resolver_invalid_return_type(): """Test that resolver must return a list.""" from auth0_api_python.errors import DomainsResolverError - + def bad_resolver(context: dict) -> str: return "tenant1.auth0.com" # Should return list, not string - + api_client = ApiClient(ApiClientOptions( domains=bad_resolver, audience="my-audience" )) - + with pytest.raises(DomainsResolverError) as err: await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") - + assert "must return a list" in str(err.value) @@ -2985,18 +2985,18 @@ def bad_resolver(context: dict) 
-> str: async def test_mcd_resolve_allowed_domains_resolver_empty_list(): """Test that resolver cannot return empty list.""" from auth0_api_python.errors import DomainsResolverError - + def empty_resolver(context: dict) -> list[str]: return [] - + api_client = ApiClient(ApiClientOptions( domains=empty_resolver, audience="my-audience" )) - + with pytest.raises(DomainsResolverError) as err: await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") - + assert "returned an empty list" in str(err.value) @@ -3004,16 +3004,16 @@ def empty_resolver(context: dict) -> list[str]: async def test_mcd_resolve_allowed_domains_resolver_receives_context(): """Test that resolver receives correct context with unverified_iss, request_url, and request_headers.""" received_context = {} - + def context_capture_resolver(context: dict) -> list[str]: received_context.update(context) return ["tenant1.auth0.com"] - + api_client = ApiClient(ApiClientOptions( domains=context_capture_resolver, audience="my-audience" )) - + # Call with request_url and request_headers mock_url = "https://api.example.com/protected" mock_headers = {"host": "api.example.com", "user-agent": "test"} @@ -3022,7 +3022,7 @@ def context_capture_resolver(context: dict) -> list[str]: request_url=mock_url, request_headers=mock_headers ) - + # Verify context was passed correctly (matching TypeScript SDK structure) assert received_context['unverified_iss'] == "https://tenant1.auth0.com/" assert received_context['request_url'] == mock_url @@ -3036,7 +3036,7 @@ async def test_mcd_resolve_allowed_domains_single_domain_mode(): domain="auth0.local", audience="my-audience" )) - + # Single domain mode should return None result = await api_client._resolve_allowed_domains("https://auth0.local/") assert result is None @@ -3047,7 +3047,7 @@ async def test_mcd_verify_rejects_symmetric_algorithm(): """Test that verify_access_token rejects tokens with symmetric algorithms (HS256).""" import base64 import json - + # Create a 
token with HS256 algorithm in header (without actually signing it) header = {"alg": "HS256", "typ": "JWT", "kid": "test-key"} payload = { @@ -3057,22 +3057,22 @@ async def test_mcd_verify_rejects_symmetric_algorithm(): "exp": 9999999999, "iat": 1000000000 } - + # Encode header and payload (signature doesn't matter for this test) header_b64 = base64.urlsafe_b64encode(json.dumps(header).encode()).decode().rstrip('=') payload_b64 = base64.urlsafe_b64encode(json.dumps(payload).encode()).decode().rstrip('=') fake_signature = "fake_signature" hs256_token = f"{header_b64}.{payload_b64}.{fake_signature}" - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + # Should reject immediately without network calls with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(hs256_token) - + assert "Symmetric algorithm 'HS256' is not supported" in str(err.value) @@ -3086,21 +3086,21 @@ async def test_mcd_verify_early_issuer_validation(httpx_mock): audience="my-audience", issuer="https://disallowed-tenant.auth0.com/" ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com", "tenant2.auth0.com"], audience="my-audience" )) - + # Mock should NOT be called because issuer validation happens first # If JWKS is fetched, test will fail because no mock is registered - + with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(token) - + # Verify the error is about issuer not being allowed assert "not in the list of allowed domains" in str(err.value) - + # Verify no HTTP calls were made (JWKS fetch was skipped) assert len(httpx_mock.get_requests()) == 0 @@ -3115,7 +3115,7 @@ async def test_mcd_discovery_uses_token_issuer(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery for tenant1 (token's issuer) httpx_mock.add_response( method="GET", @@ -3125,7 +3125,7 @@ async def test_mcd_discovery_uses_token_issuer(httpx_mock): 
"jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS for tenant1 httpx_mock.add_response( method="GET", @@ -3143,18 +3143,18 @@ async def test_mcd_discovery_uses_token_issuer(httpx_mock): ] } ) - + # Create client with multiple domains api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com", "tenant2.auth0.com"], audience="my-audience" )) - + # Verify token - should use tenant1's discovery claims = await api_client.verify_access_token(token) - + assert claims["sub"] == "user123" - + # Verify the correct discovery URL was called requests = httpx_mock.get_requests() discovery_requests = [r for r in requests if 'openid-configuration' in str(r.url)] @@ -3172,7 +3172,7 @@ async def test_mcd_first_issuer_validation(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery that returns DIFFERENT issuer (attack scenario) httpx_mock.add_response( method="GET", @@ -3182,16 +3182,16 @@ async def test_mcd_first_issuer_validation(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + # Should fail at first issuer validation (before JWKS fetch) with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(token) - + assert "token issuer does not match the discovery issuer" in str(err.value).lower() @@ -3204,7 +3204,7 @@ async def test_mcd_discovery_missing_issuer_field(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery WITHOUT issuer field httpx_mock.add_response( method="GET", @@ -3214,15 +3214,15 @@ async def test_mcd_discovery_missing_issuer_field(httpx_mock): # Missing "issuer" field } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(token) - + assert 
"missing 'issuer' field" in str(err.value).lower() @@ -3235,7 +3235,7 @@ async def test_mcd_discovery_missing_jwks_uri_field(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery WITHOUT jwks_uri field httpx_mock.add_response( method="GET", @@ -3245,15 +3245,15 @@ async def test_mcd_discovery_missing_jwks_uri_field(httpx_mock): # Missing "jwks_uri" field } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(token) - + assert "missing 'jwks_uri' field" in str(err.value).lower() @@ -3267,7 +3267,7 @@ async def test_mcd_jwks_fetched_from_issuer_jwks_uri(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery for tenant1 with specific jwks_uri httpx_mock.add_response( method="GET", @@ -3277,7 +3277,7 @@ async def test_mcd_jwks_fetched_from_issuer_jwks_uri(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" # Tenant-specific } ) - + # Mock JWKS for tenant1 httpx_mock.add_response( method="GET", @@ -3295,17 +3295,17 @@ async def test_mcd_jwks_fetched_from_issuer_jwks_uri(httpx_mock): ] } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com", "tenant2.auth0.com"], audience="my-audience" )) - + # Verify token - should fetch JWKS from tenant1's specific URI claims = await api_client.verify_access_token(token) - + assert claims["sub"] == "user123" - + # Verify JWKS was fetched from tenant1's specific URI requests = httpx_mock.get_requests() jwks_requests = [r for r in requests if 'jwks.json' in str(r.url)] @@ -3323,7 +3323,7 @@ async def test_mcd_signature_verification_with_correct_key(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery httpx_mock.add_response( method="GET", @@ -3333,7 +3333,7 @@ async def 
test_mcd_signature_verification_with_correct_key(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS with multiple keys - correct key is TEST_KEY httpx_mock.add_response( method="GET", @@ -3359,15 +3359,15 @@ async def test_mcd_signature_verification_with_correct_key(httpx_mock): ] } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + # Should successfully verify using TEST_KEY (not OTHER_KEY) claims = await api_client.verify_access_token(token) - + assert claims["sub"] == "user123" @@ -3381,7 +3381,7 @@ async def test_mcd_jwks_no_matching_kid(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery httpx_mock.add_response( method="GET", @@ -3391,7 +3391,7 @@ async def test_mcd_jwks_no_matching_kid(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS with keys that DON'T match token's kid httpx_mock.add_response( method="GET", @@ -3417,16 +3417,16 @@ async def test_mcd_jwks_no_matching_kid(httpx_mock): ] } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + # Should fail with clear error about missing kid with pytest.raises(VerifyAccessTokenError) as err: await api_client.verify_access_token(token) - + assert "no matching key found in jwks" in str(err.value).lower() @@ -3440,14 +3440,14 @@ async def test_mcd_discovery_cached_per_issuer(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + token2 = await generate_token( domain="tenant2.auth0.com", user_id="user456", audience="my-audience", issuer="https://tenant2.auth0.com/" ) - + # Mock discovery for tenant1 httpx_mock.add_response( method="GET", @@ -3457,7 +3457,7 @@ async def test_mcd_discovery_cached_per_issuer(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS for tenant1 httpx_mock.add_response( method="GET", 
@@ -3475,7 +3475,7 @@ async def test_mcd_discovery_cached_per_issuer(httpx_mock): ] } ) - + # Mock discovery for tenant2 httpx_mock.add_response( method="GET", @@ -3485,7 +3485,7 @@ async def test_mcd_discovery_cached_per_issuer(httpx_mock): "jwks_uri": "https://tenant2.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS for tenant2 httpx_mock.add_response( method="GET", @@ -3503,19 +3503,19 @@ async def test_mcd_discovery_cached_per_issuer(httpx_mock): ] } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com", "tenant2.auth0.com"], audience="my-audience" )) - + # Verify both tokens - should cache separately claims1 = await api_client.verify_access_token(token1) claims2 = await api_client.verify_access_token(token2) - + assert claims1["sub"] == "user123" assert claims2["sub"] == "user456" - + # Verify both discovery endpoints were called requests = httpx_mock.get_requests() discovery_requests = [r for r in requests if 'openid-configuration' in str(r.url)] @@ -3532,14 +3532,14 @@ async def test_mcd_discovery_cache_hit(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + token2 = await generate_token( domain="tenant1.auth0.com", user_id="user456", audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery for tenant1 (only once) httpx_mock.add_response( method="GET", @@ -3549,7 +3549,7 @@ async def test_mcd_discovery_cache_hit(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS for tenant1 (only once) httpx_mock.add_response( method="GET", @@ -3567,25 +3567,25 @@ async def test_mcd_discovery_cache_hit(httpx_mock): ] } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + # Verify first token - fetches from network claims1 = await api_client.verify_access_token(token1) assert claims1["sub"] == "user123" - + # Verify second token - should use cache (no additional HTTP calls) claims2 = await 
api_client.verify_access_token(token2) assert claims2["sub"] == "user456" - + # Verify discovery was only called once requests = httpx_mock.get_requests() discovery_requests = [r for r in requests if 'openid-configuration' in str(r.url)] assert len(discovery_requests) == 1 - + # Verify JWKS was only called once jwks_requests = [r for r in requests if 'jwks.json' in str(r.url)] assert len(jwks_requests) == 1 @@ -3600,7 +3600,7 @@ async def test_mcd_jwks_cached_per_uri(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery httpx_mock.add_response( method="GET", @@ -3610,7 +3610,7 @@ async def test_mcd_jwks_cached_per_uri(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS httpx_mock.add_response( method="GET", @@ -3628,16 +3628,16 @@ async def test_mcd_jwks_cached_per_uri(httpx_mock): ] } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + # Verify token claims = await api_client.verify_access_token(token) assert claims["sub"] == "user123" - + # Verify cache key is based on jwks_uri cache_key = "https://tenant1.auth0.com/.well-known/jwks.json" cached_jwks = api_client._jwks_cache.get(cache_key) @@ -3654,14 +3654,14 @@ async def test_mcd_jwks_cache_hit(httpx_mock): audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + token2 = await generate_token( domain="tenant1.auth0.com", user_id="user456", audience="my-audience", issuer="https://tenant1.auth0.com/" ) - + # Mock discovery (once) httpx_mock.add_response( method="GET", @@ -3671,7 +3671,7 @@ async def test_mcd_jwks_cache_hit(httpx_mock): "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" } ) - + # Mock JWKS (once) httpx_mock.add_response( method="GET", @@ -3689,19 +3689,19 @@ async def test_mcd_jwks_cache_hit(httpx_mock): ] } ) - + api_client = ApiClient(ApiClientOptions( domains=["tenant1.auth0.com"], audience="my-audience" )) - + # Verify both tokens 
claims1 = await api_client.verify_access_token(token1) claims2 = await api_client.verify_access_token(token2) - + assert claims1["sub"] == "user123" assert claims2["sub"] == "user456" - + # Verify JWKS was only fetched once requests = httpx_mock.get_requests() jwks_requests = [r for r in requests if 'jwks.json' in str(r.url)] @@ -3716,7 +3716,7 @@ async def test_mcd_cache_max_entries_configuration(httpx_mock): audience="my-audience", cache_max_entries=2 )) - + # Verify both caches have correct max_entries assert api_client._discovery_cache._max_entries == 2 assert api_client._jwks_cache._max_entries == 2 diff --git a/tests/test_cache.py b/tests/test_cache.py index eb001e0..b1880af 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -2,7 +2,6 @@ from auth0_api_python.cache import InMemoryCache - # ===== InMemoryCache Basic Operations ===== diff --git a/tests/test_utils.py b/tests/test_utils.py index ed86161..b45d8dc 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -5,9 +5,13 @@ import asyncio import pytest -from auth0_api_python.utils import normalize_domain, get_unverified_payload, parse_cache_control_max_age -from auth0_api_python.token_utils import generate_token +from auth0_api_python.token_utils import generate_token +from auth0_api_python.utils import ( + get_unverified_payload, + normalize_domain, + parse_cache_control_max_age, +) # ===== normalize_domain ===== From e92fade06ac7ac7aeed48f212ca49b7fccd0b82b Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Mon, 23 Feb 2026 18:59:27 +0530 Subject: [PATCH 4/8] fix: update imports and improve error handling in tests --- tests/test_api_client.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/tests/test_api_client.py b/tests/test_api_client.py index 24a26f3..5481a68 100644 --- a/tests/test_api_client.py +++ b/tests/test_api_client.py @@ -20,10 +20,12 @@ from freezegun import freeze_time from pytest_httpx import HTTPXMock -from 
auth0_api_python.api_client import ApiClient +from auth0_api_python.api_client import MAX_ARRAY_VALUES_PER_KEY, ApiClient from auth0_api_python.config import ApiClientOptions from auth0_api_python.errors import ( ApiError, + ConfigurationError, + DomainsResolverError, GetAccessTokenForConnectionError, GetTokenByExchangeProfileError, InvalidAuthSchemeError, @@ -52,7 +54,7 @@ async def test_init_missing_args(): """ Test that providing no audience or domain raises an error. """ - from auth0_api_python.errors import ConfigurationError + # Empty domain now raises ConfigurationError (not MissingRequiredArgumentError) with pytest.raises(ConfigurationError): @@ -2206,7 +2208,7 @@ async def test_get_token_by_exchange_profile_extra_params_denylist(httpx_mock: H @pytest.mark.asyncio async def test_extra_array_exact_limit_passes(mock_discovery, api_client_confidential, httpx_mock): """Test that array with exactly MAX_ARRAY_VALUES_PER_KEY passes.""" - from auth0_api_python.api_client import MAX_ARRAY_VALUES_PER_KEY + httpx_mock.add_response( method="POST", @@ -2230,7 +2232,7 @@ async def test_extra_array_exact_limit_passes(mock_discovery, api_client_confide @pytest.mark.asyncio async def test_extra_array_limit(mock_discovery, api_client_confidential): """Test that array size limit is enforced (DoS protection).""" - from auth0_api_python.api_client import MAX_ARRAY_VALUES_PER_KEY + # Create array exceeding limit big = list(map(str, range(MAX_ARRAY_VALUES_PER_KEY + 1))) @@ -2618,7 +2620,6 @@ async def test_token_response_parsing( assert result["access_token"] == "t" # Verify expires_at calculation (deterministic with frozen time) - import time expected_expires_at = int(time.time()) + expected_expires_in assert result["expires_at"] == expected_expires_at else: @@ -2784,7 +2785,7 @@ async def test_get_token_by_exchange_profile_custom_timeout_honored(httpx_mock: @pytest.mark.asyncio async def test_mcd_init_missing_domain_and_domains(): """Test that providing neither domain nor 
domains raises ConfigurationError.""" - from auth0_api_python.errors import ConfigurationError + with pytest.raises(ConfigurationError) as err: _ = ApiClient(ApiClientOptions(audience="my-audience")) @@ -2842,7 +2843,7 @@ async def test_mcd_init_with_both_domain_and_domains(): @pytest.mark.asyncio async def test_mcd_init_with_empty_domains_list(): """Test that empty domains list raises ConfigurationError.""" - from auth0_api_python.errors import ConfigurationError + with pytest.raises(ConfigurationError) as err: _ = ApiClient(ApiClientOptions( @@ -2871,7 +2872,7 @@ def my_resolver(context: dict) -> list[str]: @pytest.mark.asyncio async def test_mcd_init_with_invalid_domains_type(): """Test that invalid domains type raises ConfigurationError.""" - from auth0_api_python.errors import ConfigurationError + with pytest.raises(ConfigurationError) as err: _ = ApiClient(ApiClientOptions( @@ -2945,7 +2946,7 @@ def my_resolver(context: dict) -> list[str]: @pytest.mark.asyncio async def test_mcd_resolve_allowed_domains_resolver_error(): """Test that resolver errors are wrapped in DomainsResolverError.""" - from auth0_api_python.errors import DomainsResolverError + def failing_resolver(context: dict) -> list[str]: raise ValueError("Database connection failed") @@ -2965,7 +2966,7 @@ def failing_resolver(context: dict) -> list[str]: @pytest.mark.asyncio async def test_mcd_resolve_allowed_domains_resolver_invalid_return_type(): """Test that resolver must return a list.""" - from auth0_api_python.errors import DomainsResolverError + def bad_resolver(context: dict) -> str: return "tenant1.auth0.com" # Should return list, not string @@ -2984,7 +2985,7 @@ def bad_resolver(context: dict) -> str: @pytest.mark.asyncio async def test_mcd_resolve_allowed_domains_resolver_empty_list(): """Test that resolver cannot return empty list.""" - from auth0_api_python.errors import DomainsResolverError + def empty_resolver(context: dict) -> list[str]: return [] @@ -3045,9 +3046,6 @@ async def 
test_mcd_resolve_allowed_domains_single_domain_mode(): @pytest.mark.asyncio async def test_mcd_verify_rejects_symmetric_algorithm(): """Test that verify_access_token rejects tokens with symmetric algorithms (HS256).""" - import base64 - import json - # Create a token with HS256 algorithm in header (without actually signing it) header = {"alg": "HS256", "typ": "JWT", "kid": "test-key"} payload = { From ba10d5f2158cc5a64688fa41e835f23e7663b352 Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Mon, 23 Feb 2026 19:08:38 +0530 Subject: [PATCH 5/8] fix: update return type of _resolve_allowed_domains method to Optional[list[str]] --- src/auth0_api_python/api_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/auth0_api_python/api_client.py b/src/auth0_api_python/api_client.py index a16b53a..d0cd6bb 100644 --- a/src/auth0_api_python/api_client.py +++ b/src/auth0_api_python/api_client.py @@ -108,7 +108,7 @@ async def _resolve_allowed_domains( unverified_iss: str, request_url: Optional[str] = None, request_headers: Optional[dict] = None - ) -> list[str]: + ) -> Optional[list[str]]: """ Resolve and validate allowed domains for the given issuer. 
From 18f31d17ca6cf8940ebf0c0f09b3dca49 Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Sat, 28 Feb 2026 00:11:33 +0530 Subject: [PATCH 6/8] feat: enhance domain handling and validation in ApiClient and caching mechanisms --- docs/Caching.md | 4 +- src/auth0_api_python/api_client.py | 18 ++- src/auth0_api_python/cache.py | 16 ++- src/auth0_api_python/types.py | 33 +++-- src/auth0_api_python/utils.py | 29 +++- tests/test_api_client.py | 212 ++++++++++++++++++++++++++++- tests/test_cache.py | 10 ++ tests/test_utils.py | 44 +++++- 8 files changed, 333 insertions(+), 33 deletions(-) diff --git a/docs/Caching.md b/docs/Caching.md index befda62..bd3c784 100644 --- a/docs/Caching.md +++ b/docs/Caching.md @@ -71,7 +71,9 @@ api_client = ApiClient(ApiClientOptions( )) ``` -When a custom adapter is provided, both the discovery cache and JWKS cache use it. Cache keys are inherently distinct — discovery keys are normalized issuer URLs (e.g., `https://tenant.auth0.com/`) and JWKS keys are `jwks_uri` values (e.g., `https://tenant.auth0.com/.well-known/jwks.json`). +When a custom adapter is provided, both the discovery cache and JWKS cache use the same adapter instance. Cache keys are inherently distinct — discovery keys are normalized issuer URLs (e.g., `https://tenant.auth0.com/`) and JWKS keys are `jwks_uri` values (e.g., `https://tenant.auth0.com/.well-known/jwks.json`). + +**Note:** Because both caches share one adapter, entries share the same LRU eviction pool. A JWKS entry could evict a discovery entry (or vice versa) once the shared entry limit is reached. Set `cache_max_entries` accordingly — recommended: `number_of_issuers × 3`. With the default `InMemoryCache`, discovery and JWKS caches are separate and each gets its own `max_entries` budget. 
## Tuning Recommendations diff --git a/src/auth0_api_python/api_client.py b/src/auth0_api_python/api_client.py index d0cd6bb..39b1577 100644 --- a/src/auth0_api_python/api_client.py +++ b/src/auth0_api_python/api_client.py @@ -1,3 +1,4 @@ +import asyncio import time from collections.abc import Mapping, Sequence from typing import Any, Optional, Union @@ -63,6 +64,10 @@ def __init__(self, options: ApiClientOptions): # Static list validation if len(options.domains) == 0: raise ConfigurationError("domains list cannot be empty") + if not all(isinstance(d, str) and d.strip() for d in options.domains): + raise ConfigurationError( + "domains list must contain only non-empty strings" + ) # Normalize and store domains self._allowed_domains = [normalize_domain(d) for d in options.domains] elif callable(options.domains): @@ -145,9 +150,11 @@ async def _resolve_allowed_domains( 'unverified_iss': unverified_iss } - # Invoke resolver + # Invoke resolver (supports both sync and async resolvers) try: result = self._allowed_domains(context) + if asyncio.iscoroutine(result) or asyncio.isfuture(result): + result = await result except Exception as e: raise DomainsResolverError( f"Domains resolver function failed: {str(e)}" @@ -164,6 +171,11 @@ async def _resolve_allowed_domains( "Domains resolver returned an empty list" ) + if not all(isinstance(d, str) and d.strip() for d in result): + raise DomainsResolverError( + "Domains resolver must return a list of non-empty strings" + ) + # Normalize domains from resolver allowed_domains = [normalize_domain(d) for d in result] else: @@ -984,11 +996,11 @@ async def _discover(self, issuer: Optional[str] = None) -> dict[str, Any]: OIDC discovery metadata dictionary """ if issuer: + cache_key = issuer # Already normalized by caller domain = issuer.replace('https://', '').replace('http://', '').rstrip('/') else: domain = self.options.domain - - cache_key = normalize_domain(f"https://{domain}") + cache_key = normalize_domain(f"https://{domain}") 
cached = self._discovery_cache.get(cache_key) if cached: diff --git a/src/auth0_api_python/cache.py b/src/auth0_api_python/cache.py index 4a9b950..231cdf0 100644 --- a/src/auth0_api_python/cache.py +++ b/src/auth0_api_python/cache.py @@ -1,5 +1,5 @@ +import time from abc import ABC, abstractmethod -from datetime import datetime, timedelta from typing import Any, Optional @@ -74,8 +74,12 @@ class InMemoryCache(CacheAdapter): """ Default in-memory cache implementation with LRU eviction. + Designed for asyncio (single-threaded). + For multi-threaded environments, implement a custom CacheAdapter + with appropriate locking. + Features: - - TTL (time-to-live) support per entry + - TTL (time-to-live) support per entry using monotonic clock - LRU (Least Recently Used) eviction when max_entries reached - No external dependencies @@ -96,7 +100,7 @@ def __init__(self, max_entries: int = 100): Args: max_entries: Maximum number of cache entries (default: 100) """ - self._cache: dict[str, tuple[Any, Optional[datetime]]] = {} + self._cache: dict[str, tuple[Any, Optional[float]]] = {} self._max_entries = max_entries def get(self, key: str) -> Optional[Any]: @@ -116,7 +120,7 @@ def get(self, key: str) -> Optional[Any]: value, expiry = self._cache[key] - if expiry and datetime.now() > expiry: + if expiry is not None and time.monotonic() > expiry: del self._cache[key] return None @@ -145,8 +149,8 @@ def set(self, key: str, value: Any, ttl_seconds: Optional[int] = None) -> None: del self._cache[oldest_key] expiry = None - if ttl_seconds: - expiry = datetime.now() + timedelta(seconds=ttl_seconds) + if ttl_seconds is not None: + expiry = time.monotonic() + ttl_seconds self._cache[key] = (value, expiry) diff --git a/src/auth0_api_python/types.py b/src/auth0_api_python/types.py index 9740f1c..122f59e 100644 --- a/src/auth0_api_python/types.py +++ b/src/auth0_api_python/types.py @@ -2,7 +2,8 @@ Type definitions for auth0-api-python SDK """ -from typing import Callable, Optional, TypedDict 
+from collections.abc import Awaitable, Callable +from typing import Optional, TypedDict, Union class DomainsResolverContext(TypedDict, total=False): @@ -12,19 +13,20 @@ class DomainsResolverContext(TypedDict, total=False): Attributes: request_url: The URL the API request was made to (optional) request_headers: Request headers dict (e.g., Host, X-Forwarded-Host) (optional) - unverified_iss: The issuer claim from the unverified token (required) + unverified_iss: The issuer claim from the unverified token """ request_url: Optional[str] request_headers: Optional[dict] - unverified_iss: str # This is required, others are optional + unverified_iss: str - -DomainsResolver = Callable[[DomainsResolverContext], list[str]] +DomainsResolver = Callable[ + [DomainsResolverContext], Union[list[str], Awaitable[list[str]]] +] """ Type alias for domains resolver function. -A DomainsResolver is a function that receives a DomainsResolverContext and returns -a list of allowed domain strings. +A DomainsResolver is a sync or async function that receives a DomainsResolverContext +and returns a list of allowed domain strings. 
Args: context (DomainsResolverContext): Dictionary containing: @@ -35,14 +37,17 @@ class DomainsResolverContext(TypedDict, total=False): Returns: list[str]: List of allowed domain strings (e.g., ['tenant.auth0.com']) -Example: +Example (sync): from auth0_api_python import DomainsResolverContext def my_resolver(context: DomainsResolverContext) -> list[str]: - unverified_iss = context['unverified_iss'] - request_url = context.get('request_url') - request_headers = context.get('request_headers') - - # Fetch allowed domains based on context - return ['tenant1.auth0.com', 'tenant2.auth0.com'] + host = (context.get('request_headers') or {}).get('host') + if host == 'api.brand.com': + return ['brand.custom-domain.com'] + return ['tenant.auth0.com'] + +Example (async): + async def my_async_resolver(context: DomainsResolverContext) -> list[str]: + domains = await db.lookup_domains(context['unverified_iss']) + return domains """ diff --git a/src/auth0_api_python/utils.py b/src/auth0_api_python/utils.py index 7c27176..72fbef8 100644 --- a/src/auth0_api_python/utils.py +++ b/src/auth0_api_python/utils.py @@ -53,10 +53,33 @@ def normalize_domain(domain: str) -> str: Normalized issuer URL (e.g., "https://tenant.auth0.com/") """ + if not isinstance(domain, str) or not domain.strip(): + raise ValueError("domain must be a non-empty string") + domain = domain.strip().lower() - domain = domain.replace('http://', '').replace('https://', '') - domain = domain.rstrip('/') - return f"https://{domain}/" + + # Reject http:// explicitly + if domain.startswith('http://'): + raise ValueError("invalid domain URL (https required)") + + # Strip https:// prefix + domain = domain.replace('https://', '') + + # Split host from any path/query/fragment + host = domain.split('/')[0].split('?')[0].split('#')[0] + + # Reject credentials + if '@' in host: + raise ValueError("invalid domain URL (credentials are not allowed)") + + # Check for path segments, query, or fragment + bare = domain.rstrip('/') + 
if bare != host: + raise ValueError( + "invalid domain URL (path/query/fragment are not allowed)" + ) + + return f"https://{host}/" async def fetch_oidc_metadata( diff --git a/tests/test_api_client.py b/tests/test_api_client.py index 5481a68..7280cce 100644 --- a/tests/test_api_client.py +++ b/tests/test_api_client.py @@ -1,7 +1,6 @@ import base64 import json import time -from datetime import datetime import httpx import pytest @@ -2883,6 +2882,28 @@ async def test_mcd_init_with_invalid_domains_type(): assert "must be either a list" in str(err.value) +@pytest.mark.asyncio +async def test_mcd_init_with_non_string_domains_list(): + """Test that domains list with non-string items raises ConfigurationError.""" + with pytest.raises(ConfigurationError, match="non-empty strings"): + ApiClient(ApiClientOptions( + domains=[123, "tenant.auth0.com"], + audience="my-audience" + )) + + with pytest.raises(ConfigurationError, match="non-empty strings"): + ApiClient(ApiClientOptions( + domains=[None], + audience="my-audience" + )) + + with pytest.raises(ConfigurationError, match="non-empty strings"): + ApiClient(ApiClientOptions( + domains=["tenant.auth0.com", ""], + audience="my-audience" + )) + + @pytest.mark.asyncio async def test_mcd_resolve_allowed_domains_static_list(): """Test _resolve_allowed_domains with static list.""" @@ -3043,6 +3064,53 @@ async def test_mcd_resolve_allowed_domains_single_domain_mode(): assert result is None +@pytest.mark.asyncio +async def test_mcd_async_resolver_success(): + """Test that async resolver functions are properly awaited.""" + async def async_resolver(context): + return ["tenant1.auth0.com", "tenant2.auth0.com"] + + api_client = ApiClient(ApiClientOptions( + domains=async_resolver, + audience="my-audience" + )) + + result = await api_client._resolve_allowed_domains( + "https://tenant1.auth0.com/" + ) + assert result == ["https://tenant1.auth0.com/", "https://tenant2.auth0.com/"] + + +@pytest.mark.asyncio +async def 
test_mcd_async_resolver_error(): + """Test that errors from async resolvers are wrapped in DomainsResolverError.""" + async def failing_resolver(context): + raise RuntimeError("database connection lost") + + api_client = ApiClient(ApiClientOptions( + domains=failing_resolver, + audience="my-audience" + )) + + with pytest.raises(DomainsResolverError, match="database connection lost"): + await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") + + +@pytest.mark.asyncio +async def test_mcd_resolver_returns_non_string_items(): + """Test that resolver returning non-string items raises DomainsResolverError.""" + def bad_resolver(context): + return [123, None] + + api_client = ApiClient(ApiClientOptions( + domains=bad_resolver, + audience="my-audience" + )) + + with pytest.raises(DomainsResolverError, match="non-empty strings"): + await api_client._resolve_allowed_domains("https://tenant1.auth0.com/") + + @pytest.mark.asyncio async def test_mcd_verify_rejects_symmetric_algorithm(): """Test that verify_access_token rejects tokens with symmetric algorithms (HS256).""" @@ -3193,6 +3261,64 @@ async def test_mcd_first_issuer_validation(httpx_mock): assert "token issuer does not match the discovery issuer" in str(err.value).lower() +@pytest.mark.asyncio +async def test_mcd_second_issuer_validation(httpx_mock): + """Test second issuer validation: verified claims iss must match discovery issuer exactly. + + The first validation compares normalized issuers (case-insensitive, trailing slash). + The second validation (post-signature) compares raw strings. This catches subtle + mismatches that normalization hides — a defense-in-depth guard against token tampering. 
+ """ + # Generate token with MIXED CASE issuer — normalizes to same value as discovery + # but raw string differs from discovery's lowercase issuer + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://Tenant1.Auth0.Com" # Mixed case, no trailing slash + ) + + # Mock discovery returning LOWERCASE issuer (standard Auth0 format) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", # Lowercase + trailing slash + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS (signature verification must pass for second check to be reached) + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=["tenant1.auth0.com"], + audience="my-audience" + )) + + # First validation passes (normalized forms match), but second validation + # fails because raw claims["iss"] != discovery_issuer + with pytest.raises(VerifyAccessTokenError) as err: + await api_client.verify_access_token(token) + + assert "verified token issuer does not match the discovery issuer" in str(err.value).lower() + + @pytest.mark.asyncio async def test_mcd_discovery_missing_issuer_field(httpx_mock): """Test that missing issuer field in discovery causes clear error.""" @@ -3782,19 +3908,97 @@ async def 
test_effective_ttl_from_cache_control(httpx_mock, max_age_header, conf cache_ttl_seconds=configured_ttl, )) - before = datetime.now() + before = time.monotonic() await api_client.verify_access_token(token) # Inspect discovery cache entry expiry discovery_key = "https://tenant1.auth0.com/" _, discovery_expiry = api_client._discovery_cache._cache[discovery_key] - discovery_ttl = (discovery_expiry - before).total_seconds() + discovery_ttl = discovery_expiry - before assert abs(discovery_ttl - expected_ttl) < 2 # Inspect JWKS cache entry expiry jwks_key = "https://tenant1.auth0.com/.well-known/jwks.json" _, jwks_expiry = api_client._jwks_cache._cache[jwks_key] - jwks_ttl = (jwks_expiry - before).total_seconds() + jwks_ttl = jwks_expiry - before assert abs(jwks_ttl - expected_ttl) < 2 +# ===== MCD: verify_request() Integration ===== + + +@pytest.mark.asyncio +async def test_mcd_verify_request_with_resolver_context(httpx_mock): + """Test that verify_request() forwards request_url and request_headers to the resolver. + + This tests the actual user-facing API path: verify_request() → verify_access_token() + → _resolve_allowed_domains() with resolver context populated from verify_request args. 
+ """ + received_contexts = [] + + def capturing_resolver(context): + """Resolver that captures the context it receives.""" + received_contexts.append(context) + return ["tenant1.auth0.com"] + + token = await generate_token( + domain="tenant1.auth0.com", + user_id="user123", + audience="my-audience", + issuer="https://tenant1.auth0.com/" + ) + + # Mock discovery + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/openid-configuration", + json={ + "issuer": "https://tenant1.auth0.com/", + "jwks_uri": "https://tenant1.auth0.com/.well-known/jwks.json" + } + ) + + # Mock JWKS + httpx_mock.add_response( + method="GET", + url="https://tenant1.auth0.com/.well-known/jwks.json", + json={ + "keys": [ + { + "kty": "RSA", + "kid": "TEST_KEY", + "n": "whYOFK2Ocbbpb_zVypi9SeKiNUqKQH0zTKN1-6fpCTu6ZalGI82s7XK3tan4dJt90ptUPKD2zvxqTzFNfx4HHHsrYCf2-FMLn1VTJfQazA2BvJqAwcpW1bqRUEty8tS_Yv4hRvWfQPcc2Gc3-_fQOOW57zVy-rNoJc744kb30NjQxdGp03J2S3GLQu7oKtSDDPooQHD38PEMNnITf0pj-KgDPjymkMGoJlO3aKppsjfbt_AH6GGdRghYRLOUwQU-h-ofWHR3lbYiKtXPn5dN24kiHy61e3VAQ9_YAZlwXC_99GGtw_NpghFAuM4P1JDn0DppJldy3PGFC0GfBCZASw", + "e": "AQAB", + "alg": "RS256", + "use": "sig" + } + ] + } + ) + + api_client = ApiClient(ApiClientOptions( + domains=capturing_resolver, + audience="my-audience" + )) + + request_headers = { + "authorization": f"Bearer {token}", + "x-custom-header": "test-value" + } + + claims = await api_client.verify_request( + headers=request_headers, + http_url="https://api.example.com/users" + ) + + assert claims["sub"] == "user123" + + # Verify the resolver received the correct context + assert len(received_contexts) == 1 + ctx = received_contexts[0] + assert ctx["request_url"] == "https://api.example.com/users" + assert ctx["request_headers"]["authorization"] == f"Bearer {token}" + assert ctx["request_headers"]["x-custom-header"] == "test-value" + assert ctx["unverified_iss"] == "https://tenant1.auth0.com/" + + diff --git a/tests/test_cache.py b/tests/test_cache.py 
index b1880af..0da582f 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -111,6 +111,16 @@ def test_in_memory_cache_no_ttl(): assert cache.get("key1") == "value1" +def test_in_memory_cache_ttl_zero_expires_immediately(): + """Test that ttl_seconds=0 means entries expire immediately (always refetch).""" + cache = InMemoryCache() + + cache.set("key1", "value1", ttl_seconds=0) + + # Entry should be immediately expired — forces refetch on next access + assert cache.get("key1") is None + + # ===== LRU Eviction ===== diff --git a/tests/test_utils.py b/tests/test_utils.py index b45d8dc..65cc09f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -27,8 +27,9 @@ def test_normalize_domain_with_https(): def test_normalize_domain_with_http(): - """Test normalization of domain with http:// prefix (converts to https).""" - assert normalize_domain("http://tenant.auth0.com") == "https://tenant.auth0.com/" + """Test that http:// prefix is rejected (https required).""" + with pytest.raises(ValueError, match="https required"): + normalize_domain("http://tenant.auth0.com") def test_normalize_domain_with_trailing_slash(): @@ -64,6 +65,45 @@ def test_normalize_domain_custom_domain(): def test_normalize_domain_multiple_slashes(): """Test normalization with multiple trailing slashes.""" assert normalize_domain("tenant.auth0.com///") == "https://tenant.auth0.com/" + + +def test_normalize_domain_rejects_path(): + """Test that domain with path segments is rejected.""" + with pytest.raises(ValueError, match="path/query/fragment are not allowed"): + normalize_domain("tenant.auth0.com/some/path") + + +def test_normalize_domain_rejects_query(): + """Test that domain with query string is rejected.""" + with pytest.raises(ValueError, match="path/query/fragment are not allowed"): + normalize_domain("tenant.auth0.com?foo=bar") + + +def test_normalize_domain_rejects_fragment(): + """Test that domain with fragment is rejected.""" + with pytest.raises(ValueError, 
match="path/query/fragment are not allowed"): + normalize_domain("tenant.auth0.com#section") + + +def test_normalize_domain_rejects_credentials(): + """Test that domain with credentials is rejected.""" + with pytest.raises(ValueError, match="credentials are not allowed"): + normalize_domain("user:pass@tenant.auth0.com") + + +def test_normalize_domain_rejects_http(): + """Test that http:// scheme is rejected (must use https).""" + with pytest.raises(ValueError, match="https required"): + normalize_domain("http://tenant.auth0.com") + + +def test_normalize_domain_rejects_empty(): + """Test that empty and whitespace-only strings are rejected.""" + with pytest.raises(ValueError, match="non-empty string"): + normalize_domain("") + + with pytest.raises(ValueError, match="non-empty string"): + normalize_domain(" ") # ===== get_unverified_payload ===== From aa317042395e0cee7bd03382f66d8a0a38d72e7e Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Sat, 28 Feb 2026 00:19:11 +0530 Subject: [PATCH 7/8] fix: improve domain resolver handling in ApiClient and clean up test formatting --- src/auth0_api_python/api_client.py | 3 ++- tests/test_utils.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/auth0_api_python/api_client.py b/src/auth0_api_python/api_client.py index 39b1577..e1585e9 100644 --- a/src/auth0_api_python/api_client.py +++ b/src/auth0_api_python/api_client.py @@ -143,6 +143,7 @@ async def _resolve_allowed_domains( allowed_domains = self._allowed_domains # Dynamic resolver mode elif callable(self._allowed_domains): + resolver = self._allowed_domains # Build resolver context context = { 'request_url': request_url, @@ -152,7 +153,7 @@ async def _resolve_allowed_domains( # Invoke resolver (supports both sync and async resolvers) try: - result = self._allowed_domains(context) + result = resolver(context) if asyncio.iscoroutine(result) or asyncio.isfuture(result): result = await result except Exception as e: diff --git a/tests/test_utils.py 
b/tests/test_utils.py index 65cc09f..7e90808 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -65,7 +65,7 @@ def test_normalize_domain_custom_domain(): def test_normalize_domain_multiple_slashes(): """Test normalization with multiple trailing slashes.""" assert normalize_domain("tenant.auth0.com///") == "https://tenant.auth0.com/" - + def test_normalize_domain_rejects_path(): """Test that domain with path segments is rejected.""" From 229cdb33b5a9acce933107b11a0b5e19df447250 Mon Sep 17 00:00:00 2001 From: Snehil Kishore Date: Sat, 28 Feb 2026 00:36:02 +0530 Subject: [PATCH 8/8] revert: last changes --- src/auth0_api_python/api_client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/auth0_api_python/api_client.py b/src/auth0_api_python/api_client.py index e1585e9..39b1577 100644 --- a/src/auth0_api_python/api_client.py +++ b/src/auth0_api_python/api_client.py @@ -143,7 +143,6 @@ async def _resolve_allowed_domains( allowed_domains = self._allowed_domains # Dynamic resolver mode elif callable(self._allowed_domains): - resolver = self._allowed_domains # Build resolver context context = { 'request_url': request_url, @@ -153,7 +152,7 @@ async def _resolve_allowed_domains( # Invoke resolver (supports both sync and async resolvers) try: - result = resolver(context) + result = self._allowed_domains(context) if asyncio.iscoroutine(result) or asyncio.isfuture(result): result = await result except Exception as e: