diff --git a/sqlspec/loader.py b/sqlspec/loader.py index 28f2a6d9..5f05323d 100644 --- a/sqlspec/loader.py +++ b/sqlspec/loader.py @@ -278,8 +278,8 @@ def _read_file_content(self, path: str | Path) -> str: if file_path and len(file_path) > 2 and file_path[2] == ":": # noqa: PLR2004 file_path = file_path[1:] filename = Path(file_path).name - return backend.read_text(filename, encoding=self.encoding) - return backend.read_text(path_str, encoding=self.encoding) + return backend.read_text_sync(filename, encoding=self.encoding) + return backend.read_text_sync(path_str, encoding=self.encoding) except KeyError as e: raise SQLFileNotFoundError(path_str) from e except FileNotFoundInStorageError as e: diff --git a/sqlspec/protocols.py b/sqlspec/protocols.py index 1a5bc5b5..cbfe8229 100644 --- a/sqlspec/protocols.py +++ b/sqlspec/protocols.py @@ -476,7 +476,10 @@ def get_data(self) -> Any: ... @runtime_checkable class ObjectStoreProtocol(Protocol): - """Protocol for object storage operations.""" + """Protocol for object storage operations. + + All synchronous methods use the `*_sync` suffix for consistency with async methods. + """ protocol: str backend_type: str @@ -484,75 +487,75 @@ class ObjectStoreProtocol(Protocol): def __init__(self, uri: str, **kwargs: Any) -> None: return - def read_bytes(self, path: "str | Path", **kwargs: Any) -> bytes: - """Read bytes from an object.""" + def read_bytes_sync(self, path: "str | Path", **kwargs: Any) -> bytes: + """Read bytes from an object synchronously.""" return b"" - def write_bytes(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: - """Write bytes to an object.""" + def write_bytes_sync(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: + """Write bytes to an object synchronously.""" return - def read_text(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: - """Read text from an object.""" + def read_text_sync(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: + """Read text from an object synchronously.""" return "" - def write_text(self, path: "str | Path", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: - """Write text to an object.""" + def write_text_sync(self, path: "str | Path", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: + """Write text to an object synchronously.""" return - def exists(self, path: "str | Path", **kwargs: Any) -> bool: - """Check if an object exists.""" + def exists_sync(self, path: "str | Path", **kwargs: Any) -> bool: + """Check if an object exists synchronously.""" return False - def delete(self, path: "str | Path", **kwargs: Any) -> None: - """Delete an object.""" + def delete_sync(self, path: "str | Path", **kwargs: Any) -> None: + """Delete an object synchronously.""" return - def copy(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: - """Copy an object.""" + def copy_sync(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: + """Copy an object synchronously.""" return - def move(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: - """Move an object.""" + def move_sync(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: + """Move an object synchronously.""" return - def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]: - """List objects with optional prefix.""" + def list_objects_sync(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]: + """List objects 
with optional prefix synchronously.""" return [] - def glob(self, pattern: str, **kwargs: Any) -> list[str]: - """Find objects matching a glob pattern.""" + def glob_sync(self, pattern: str, **kwargs: Any) -> list[str]: + """Find objects matching a glob pattern synchronously.""" return [] - def is_object(self, path: "str | Path") -> bool: - """Check if path points to an object.""" + def is_object_sync(self, path: "str | Path") -> bool: + """Check if path points to an object synchronously.""" return False - def is_path(self, path: "str | Path") -> bool: - """Check if path points to a prefix (directory-like).""" + def is_path_sync(self, path: "str | Path") -> bool: + """Check if path points to a prefix (directory-like) synchronously.""" return False - def get_metadata(self, path: "str | Path", **kwargs: Any) -> dict[str, object]: - """Get object metadata.""" + def get_metadata_sync(self, path: "str | Path", **kwargs: Any) -> dict[str, object]: + """Get object metadata synchronously.""" return {} - def read_arrow(self, path: "str | Path", **kwargs: Any) -> "ArrowTable": - """Read an Arrow table from storage.""" + def read_arrow_sync(self, path: "str | Path", **kwargs: Any) -> "ArrowTable": + """Read an Arrow table from storage synchronously.""" msg = "Arrow reading not implemented" raise NotImplementedError(msg) - def write_arrow(self, path: "str | Path", table: "ArrowTable", **kwargs: Any) -> None: - """Write an Arrow table to storage.""" + def write_arrow_sync(self, path: "str | Path", table: "ArrowTable", **kwargs: Any) -> None: + """Write an Arrow table to storage synchronously.""" msg = "Arrow writing not implemented" raise NotImplementedError(msg) - def stream_arrow(self, pattern: str, **kwargs: Any) -> "Iterator[ArrowRecordBatch]": - """Stream Arrow record batches from matching objects.""" + def stream_arrow_sync(self, pattern: str, **kwargs: Any) -> "Iterator[ArrowRecordBatch]": + """Stream Arrow record batches from matching objects synchronously.""" msg = "Arrow streaming not implemented" raise NotImplementedError(msg) - def stream_read(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> "Iterator[bytes]": - """Stream bytes from an object.""" + def stream_read_sync(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> "Iterator[bytes]": + """Stream bytes from an object synchronously.""" msg = "Stream reading not implemented" raise NotImplementedError(msg) diff --git a/sqlspec/storage/backends/base.py b/sqlspec/storage/backends/base.py index 12b2b434..3c2bd9a5 100644 --- a/sqlspec/storage/backends/base.py +++ b/sqlspec/storage/backends/base.py @@ -9,6 +9,7 @@ from typing_extensions import Self from sqlspec.typing import ArrowRecordBatch, ArrowTable +from sqlspec.utils.sync_tools import CapacityLimiter __all__ = ( "AsyncArrowBatchIterator", @@ -17,8 +18,38 @@ "AsyncObStoreStreamIterator", "AsyncThreadedBytesIterator", "ObjectStoreBase", + "storage_limiter", ) +# Dedicated capacity limiter for storage I/O operations (100 concurrent ops) +# This is shared across all storage backends to prevent overwhelming the system +storage_limiter = CapacityLimiter(100) + + +class _ExhaustedSentinel: + """Sentinel value to signal iterator exhaustion across thread boundaries. + + StopIteration cannot be raised into asyncio Futures, so we use this sentinel + to signal iterator exhaustion from the thread pool back to the async context. 
+ """ + + __slots__ = () + + +_EXHAUSTED = _ExhaustedSentinel() + + +def _next_or_sentinel(iterator: "Iterator[Any]") -> "Any": + """Get next item or return sentinel if exhausted. + + This helper wraps next() to catch StopIteration in the thread, + since StopIteration cannot propagate through asyncio Futures. + """ + try: + return next(iterator) + except StopIteration: + return _EXHAUSTED + class AsyncArrowBatchIterator: """Async iterator wrapper for sync Arrow batch iterators. @@ -47,16 +78,19 @@ def __aiter__(self) -> "AsyncArrowBatchIterator": async def __anext__(self) -> "ArrowRecordBatch": """Get the next item from the iterator asynchronously. + Uses asyncio.to_thread to offload the blocking next() call + to a thread pool, preventing event loop blocking. + Returns: The next Arrow record batch. Raises: StopAsyncIteration: When the iterator is exhausted. """ - try: - return next(self._sync_iter) - except StopIteration: - raise StopAsyncIteration from None + result = await asyncio.to_thread(_next_or_sentinel, self._sync_iter) + if result is _EXHAUSTED: + raise StopAsyncIteration + return cast("ArrowRecordBatch", result) class AsyncBytesIterator: @@ -309,93 +343,97 @@ async def __anext__(self) -> bytes: @mypyc_attr(allow_interpreted_subclasses=True) class ObjectStoreBase(ABC): - """Base class for storage backends.""" + """Base class for storage backends. + + All synchronous methods follow the *_sync naming convention for consistency + with their async counterparts. + """ __slots__ = () @abstractmethod - def read_bytes(self, path: str, **kwargs: Any) -> bytes: - """Read bytes from storage.""" + def read_bytes_sync(self, path: str, **kwargs: Any) -> bytes: + """Read bytes from storage synchronously.""" raise NotImplementedError @abstractmethod - def write_bytes(self, path: str, data: bytes, **kwargs: Any) -> None: - """Write bytes to storage.""" + def write_bytes_sync(self, path: str, data: bytes, **kwargs: Any) -> None: + """Write bytes to storage synchronously.""" raise NotImplementedError @abstractmethod - def stream_read(self, path: str, chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: - """Stream bytes from storage.""" + def stream_read_sync(self, path: str, chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: + """Stream bytes from storage synchronously.""" raise NotImplementedError @abstractmethod - def read_text(self, path: str, encoding: str = "utf-8", **kwargs: Any) -> str: - """Read text from storage.""" + def read_text_sync(self, path: str, encoding: str = "utf-8", **kwargs: Any) -> str: + """Read text from storage synchronously.""" raise NotImplementedError @abstractmethod - def write_text(self, path: str, data: str, encoding: str = "utf-8", **kwargs: Any) -> None: - """Write text to storage.""" + def write_text_sync(self, path: str, data: str, encoding: str = "utf-8", **kwargs: Any) -> None: + """Write text to storage synchronously.""" raise NotImplementedError @abstractmethod - def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": - """List objects in storage.""" + def list_objects_sync(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": + """List objects in storage synchronously.""" raise NotImplementedError @abstractmethod - def exists(self, path: str, **kwargs: Any) -> bool: - """Check if object exists in storage.""" + def exists_sync(self, path: str, **kwargs: Any) -> bool: + """Check if object exists in storage synchronously.""" raise NotImplementedError @abstractmethod 
- def delete(self, path: str, **kwargs: Any) -> None: - """Delete object from storage.""" + def delete_sync(self, path: str, **kwargs: Any) -> None: + """Delete object from storage synchronously.""" raise NotImplementedError @abstractmethod - def copy(self, source: str, destination: str, **kwargs: Any) -> None: - """Copy object within storage.""" + def copy_sync(self, source: str, destination: str, **kwargs: Any) -> None: + """Copy object within storage synchronously.""" raise NotImplementedError @abstractmethod - def move(self, source: str, destination: str, **kwargs: Any) -> None: - """Move object within storage.""" + def move_sync(self, source: str, destination: str, **kwargs: Any) -> None: + """Move object within storage synchronously.""" raise NotImplementedError @abstractmethod - def glob(self, pattern: str, **kwargs: Any) -> "list[str]": - """Find objects matching pattern.""" + def glob_sync(self, pattern: str, **kwargs: Any) -> "list[str]": + """Find objects matching pattern synchronously.""" raise NotImplementedError @abstractmethod - def get_metadata(self, path: str, **kwargs: Any) -> "dict[str, object]": - """Get object metadata from storage.""" + def get_metadata_sync(self, path: str, **kwargs: Any) -> "dict[str, object]": + """Get object metadata from storage synchronously.""" raise NotImplementedError @abstractmethod - def is_object(self, path: str) -> bool: - """Check if path points to an object.""" + def is_object_sync(self, path: str) -> bool: + """Check if path points to an object synchronously.""" raise NotImplementedError @abstractmethod - def is_path(self, path: str) -> bool: - """Check if path points to a directory.""" + def is_path_sync(self, path: str) -> bool: + """Check if path points to a directory synchronously.""" raise NotImplementedError @abstractmethod - def read_arrow(self, path: str, **kwargs: Any) -> ArrowTable: - """Read Arrow table from storage.""" + def read_arrow_sync(self, path: str, **kwargs: Any) -> ArrowTable: + """Read Arrow table from storage synchronously.""" raise NotImplementedError @abstractmethod - def write_arrow(self, path: str, table: ArrowTable, **kwargs: Any) -> None: - """Write Arrow table to storage.""" + def write_arrow_sync(self, path: str, table: ArrowTable, **kwargs: Any) -> None: + """Write Arrow table to storage synchronously.""" raise NotImplementedError @abstractmethod - def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecordBatch]: - """Stream Arrow record batches from storage.""" + def stream_arrow_sync(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecordBatch]: + """Stream Arrow record batches from storage synchronously.""" raise NotImplementedError @abstractmethod @@ -426,7 +464,7 @@ async def stream_read_async( raise NotImplementedError @abstractmethod - def list_objects_async(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": + async def list_objects_async(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": """List objects in storage asynchronously.""" raise NotImplementedError @@ -451,7 +489,7 @@ async def move_async(self, source: str, destination: str, **kwargs: Any) -> None raise NotImplementedError @abstractmethod - def get_metadata_async(self, path: str, **kwargs: Any) -> "dict[str, object]": + async def get_metadata_async(self, path: str, **kwargs: Any) -> "dict[str, object]": """Get object metadata from storage asynchronously.""" raise NotImplementedError diff --git a/sqlspec/storage/backends/fsspec.py 
b/sqlspec/storage/backends/fsspec.py index a11c2e78..1c2bee18 100644 --- a/sqlspec/storage/backends/fsspec.py +++ b/sqlspec/storage/backends/fsspec.py @@ -68,6 +68,8 @@ class FSSpecBackend: Implements ObjectStoreProtocol using fsspec for various protocols including HTTP, HTTPS, FTP, and cloud storage services. + + All synchronous methods use the *_sync suffix for consistency with async methods. """ __slots__ = ("_fs_uri", "backend_type", "base_path", "fs", "protocol") @@ -155,8 +157,8 @@ def base_uri(self) -> str: def _resolve_path(self, path: str | Path) -> str: return resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=False) - def read_bytes(self, path: str | Path, **kwargs: Any) -> bytes: - """Read bytes from an object.""" + def read_bytes_sync(self, path: str | Path, **kwargs: Any) -> bytes: + """Read bytes from an object synchronously.""" resolved_path = self._resolve_path(path) result = cast( "bytes", @@ -176,8 +178,8 @@ def read_bytes(self, path: str | Path, **kwargs: Any) -> bytes: ) return result - def write_bytes(self, path: str | Path, data: bytes, **kwargs: Any) -> None: - """Write bytes to an object.""" + def write_bytes_sync(self, path: str | Path, data: bytes, **kwargs: Any) -> None: + """Write bytes to an object synchronously.""" resolved_path = self._resolve_path(path) if self.protocol == "file": @@ -199,17 +201,17 @@ def write_bytes(self, path: str | Path, data: bytes, **kwargs: Any) -> None: path=resolved_path, ) - def read_text(self, path: str | Path, encoding: str = "utf-8", **kwargs: Any) -> str: - """Read text from an object.""" - data = self.read_bytes(path, **kwargs) + def read_text_sync(self, path: str | Path, encoding: str = "utf-8", **kwargs: Any) -> str: + """Read text from an object synchronously.""" + data = self.read_bytes_sync(path, **kwargs) return data.decode(encoding) - def write_text(self, path: str | Path, data: str, encoding: str = "utf-8", **kwargs: Any) -> None: - """Write text to an object.""" - self.write_bytes(path, data.encode(encoding), **kwargs) + def write_text_sync(self, path: str | Path, data: str, encoding: str = "utf-8", **kwargs: Any) -> None: + """Write text to an object synchronously.""" + self.write_bytes_sync(path, data.encode(encoding), **kwargs) - def exists(self, path: str | Path, **kwargs: Any) -> bool: - """Check if an object exists.""" + def exists_sync(self, path: str | Path, **kwargs: Any) -> bool: + """Check if an object exists synchronously.""" resolved_path = self._resolve_path(path) exists = bool(self.fs.exists(resolved_path, **kwargs)) _log_storage_event( @@ -222,8 +224,8 @@ def exists(self, path: str | Path, **kwargs: Any) -> bool: ) return exists - def delete(self, path: str | Path, **kwargs: Any) -> None: - """Delete an object.""" + def delete_sync(self, path: str | Path, **kwargs: Any) -> None: + """Delete an object synchronously.""" resolved_path = self._resolve_path(path) execute_sync_storage_operation( partial(self.fs.rm, resolved_path, **kwargs), @@ -239,8 +241,8 @@ def delete(self, path: str | Path, **kwargs: Any) -> None: path=resolved_path, ) - def copy(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: - """Copy an object.""" + def copy_sync(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: + """Copy an object synchronously.""" source_path = self._resolve_path(source) dest_path = self._resolve_path(destination) execute_sync_storage_operation( @@ -258,8 +260,8 @@ def copy(self, source: str | Path, destination: str | Path, **kwargs: Any) 
-> No destination_path=dest_path, ) - def move(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: - """Move an object.""" + def move_sync(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: + """Move an object synchronously.""" source_path = self._resolve_path(source) dest_path = self._resolve_path(destination) execute_sync_storage_operation( @@ -277,8 +279,8 @@ def move(self, source: str | Path, destination: str | Path, **kwargs: Any) -> No destination_path=dest_path, ) - def read_arrow(self, path: str | Path, **kwargs: Any) -> "ArrowTable": - """Read an Arrow table from storage.""" + def read_arrow_sync(self, path: str | Path, **kwargs: Any) -> "ArrowTable": + """Read an Arrow table from storage synchronously.""" pq = import_pyarrow_parquet() resolved_path = self._resolve_path(path) @@ -300,8 +302,8 @@ def read_arrow(self, path: str | Path, **kwargs: Any) -> "ArrowTable": ) return result - def write_arrow(self, path: str | Path, table: "ArrowTable", **kwargs: Any) -> None: - """Write an Arrow table to storage.""" + def write_arrow_sync(self, path: str | Path, table: "ArrowTable", **kwargs: Any) -> None: + """Write an Arrow table to storage synchronously.""" pq = import_pyarrow_parquet() resolved_path = self._resolve_path(path) @@ -324,8 +326,8 @@ def _read_parquet_table(self, resolved_path: str, pq: Any, options: "dict[str, A with self.fs.open(resolved_path, mode="rb", **options) as file_obj: return pq.read_table(file_obj) - def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": - """List objects with optional prefix.""" + def list_objects_sync(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": + """List objects with optional prefix synchronously.""" resolved_prefix = resolve_storage_path(prefix, self.base_path, self.protocol, strip_file_scheme=False) if recursive: results = sorted(self.fs.find(resolved_prefix, **kwargs)) @@ -341,8 +343,8 @@ def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) ) return results - def glob(self, pattern: str, **kwargs: Any) -> "list[str]": - """Find objects matching a glob pattern.""" + def glob_sync(self, pattern: str, **kwargs: Any) -> "list[str]": + """Find objects matching a glob pattern synchronously.""" resolved_pattern = resolve_storage_path(pattern, self.base_path, self.protocol, strip_file_scheme=False) results = sorted(self.fs.glob(resolved_pattern, **kwargs)) # pyright: ignore _log_storage_event( @@ -355,18 +357,18 @@ def glob(self, pattern: str, **kwargs: Any) -> "list[str]": ) return results - def is_object(self, path: str | Path) -> bool: - """Check if path points to an object.""" + def is_object_sync(self, path: str | Path) -> bool: + """Check if path points to an object synchronously.""" resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=False) return self.fs.exists(resolved_path) and not self.fs.isdir(resolved_path) - def is_path(self, path: str | Path) -> bool: - """Check if path points to a prefix (directory-like).""" + def is_path_sync(self, path: str | Path) -> bool: + """Check if path points to a prefix (directory-like) synchronously.""" resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=False) return self.fs.isdir(resolved_path) # type: ignore[no-any-return] - def get_metadata(self, path: str | Path, **kwargs: Any) -> "dict[str, object]": - """Get object metadata.""" + def get_metadata_sync(self, path: str | 
Path, **kwargs: Any) -> "dict[str, object]": + """Get object metadata synchronously.""" resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=False) try: info = self.fs.info(resolved_path, **kwargs) @@ -422,8 +424,8 @@ def sign_sync( ) raise NotImplementedError(msg) - def stream_read(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: - """Stream bytes from storage.""" + def stream_read_sync(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: + """Stream bytes from storage synchronously.""" resolved_path = self._resolve_path(path) chunk_size = chunk_size or 65536 @@ -434,8 +436,8 @@ def stream_read(self, path: "str | Path", chunk_size: "int | None" = None, **kwa break yield chunk - def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator["ArrowRecordBatch"]: - """Stream Arrow record batches from storage. + def stream_arrow_sync(self, pattern: str, **kwargs: Any) -> Iterator["ArrowRecordBatch"]: + """Stream Arrow record batches from storage synchronously. Args: pattern: The glob pattern to match. @@ -445,7 +447,7 @@ def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator["ArrowRecordBatc Arrow record batches from matching files. """ pq = import_pyarrow_parquet() - for obj_path in self.glob(pattern, **kwargs): + for obj_path in self.glob_sync(pattern, **kwargs): file_handle = execute_sync_storage_operation( partial(self.fs.open, obj_path, mode="rb"), backend=self.backend_type, @@ -463,11 +465,11 @@ def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator["ArrowRecordBatc async def read_bytes_async(self, path: "str | Path", **kwargs: Any) -> bytes: """Read bytes from storage asynchronously.""" - return await async_(self.read_bytes)(path, **kwargs) + return await async_(self.read_bytes_sync)(path, **kwargs) async def write_bytes_async(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: """Write bytes to storage asynchronously.""" - return await async_(self.write_bytes)(path, data, **kwargs) + return await async_(self.write_bytes_sync)(path, data, **kwargs) async def stream_read_async( self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any @@ -504,39 +506,39 @@ def stream_arrow_async(self, pattern: str, **kwargs: Any) -> AsyncIterator["Arro Returns: AsyncIterator yielding Arrow record batches. 
""" - return AsyncArrowBatchIterator(self.stream_arrow(pattern, **kwargs)) + return AsyncArrowBatchIterator(self.stream_arrow_sync(pattern, **kwargs)) async def read_text_async(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: """Read text from storage asynchronously.""" - return await async_(self.read_text)(path, encoding, **kwargs) + return await async_(self.read_text_sync)(path, encoding, **kwargs) async def write_text_async(self, path: str | Path, data: str, encoding: str = "utf-8", **kwargs: Any) -> None: """Write text to storage asynchronously.""" - await async_(self.write_text)(path, data, encoding, **kwargs) + await async_(self.write_text_sync)(path, data, encoding, **kwargs) async def list_objects_async(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": """List objects in storage asynchronously.""" - return await async_(self.list_objects)(prefix, recursive, **kwargs) + return await async_(self.list_objects_sync)(prefix, recursive, **kwargs) async def exists_async(self, path: str | Path, **kwargs: Any) -> bool: """Check if object exists in storage asynchronously.""" - return await async_(self.exists)(path, **kwargs) + return await async_(self.exists_sync)(path, **kwargs) async def delete_async(self, path: str | Path, **kwargs: Any) -> None: """Delete object from storage asynchronously.""" - await async_(self.delete)(path, **kwargs) + await async_(self.delete_sync)(path, **kwargs) async def copy_async(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: """Copy object in storage asynchronously.""" - await async_(self.copy)(source, destination, **kwargs) + await async_(self.copy_sync)(source, destination, **kwargs) async def move_async(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: """Move object in storage asynchronously.""" - await async_(self.move)(source, destination, **kwargs) + await async_(self.move_sync)(source, destination, **kwargs) async def get_metadata_async(self, path: str | Path, **kwargs: Any) -> "dict[str, object]": """Get object metadata from storage asynchronously.""" - return await async_(self.get_metadata)(path, **kwargs) + return await async_(self.get_metadata_sync)(path, **kwargs) @overload async def sign_async(self, paths: str, expires_in: int = 3600, for_upload: bool = False) -> str: ... @@ -551,9 +553,15 @@ async def sign_async( return await async_(self.sign_sync)(paths, expires_in, for_upload) # type: ignore[arg-type] async def read_arrow_async(self, path: str | Path, **kwargs: Any) -> "ArrowTable": - """Read Arrow table from storage asynchronously.""" - return self.read_arrow(path, **kwargs) + """Read Arrow table from storage asynchronously. + + Uses async_() with storage limiter to offload blocking PyArrow I/O to thread pool. + """ + return await async_(self.read_arrow_sync)(path, **kwargs) async def write_arrow_async(self, path: str | Path, table: "ArrowTable", **kwargs: Any) -> None: - """Write Arrow table to storage asynchronously.""" - self.write_arrow(path, table, **kwargs) + """Write Arrow table to storage asynchronously. + + Uses async_() with storage limiter to offload blocking PyArrow I/O to thread pool. + """ + await async_(self.write_arrow_sync)(path, table, **kwargs) diff --git a/sqlspec/storage/backends/local.py b/sqlspec/storage/backends/local.py index 852c3e63..4afa023e 100644 --- a/sqlspec/storage/backends/local.py +++ b/sqlspec/storage/backends/local.py @@ -4,7 +4,6 @@ No external dependencies like fsspec or obstore required. 
""" -import asyncio import shutil from collections.abc import AsyncIterator, Iterator from functools import partial @@ -16,7 +15,7 @@ from sqlspec.exceptions import FileNotFoundInStorageError from sqlspec.storage._utils import import_pyarrow_parquet -from sqlspec.storage.backends.base import AsyncArrowBatchIterator, AsyncChunkedBytesIterator +from sqlspec.storage.backends.base import AsyncArrowBatchIterator, AsyncThreadedBytesIterator from sqlspec.storage.errors import execute_sync_storage_operation from sqlspec.utils.sync_tools import async_ @@ -54,6 +53,11 @@ def _write_local_arrow(resolved: "Path", table: "ArrowTable", pq: Any, options: pq.write_table(table, str(resolved), **options) # pyright: ignore +def _open_file_for_read(path: Path) -> Any: + """Open a file for binary reading.""" + return path.open("rb") + + @mypyc_attr(allow_interpreted_subclasses=True) class LocalStore: """Simple local file system storage backend. @@ -61,10 +65,10 @@ class LocalStore: Provides file system operations without requiring fsspec or obstore. Supports file:// URIs and regular file paths. - Implements ObjectStoreProtocol for type safety. + All synchronous methods use the *_sync suffix for consistency with async methods. """ - __slots__ = ("_loop", "backend_type", "base_path", "protocol") + __slots__ = ("backend_type", "base_path", "protocol") def __init__(self, uri: str = "", **kwargs: Any) -> None: """Initialize local storage backend. @@ -100,7 +104,6 @@ def __init__(self, uri: str = "", **kwargs: Any) -> None: self.base_path.mkdir(parents=True, exist_ok=True) elif self.base_path.is_file(): self.base_path = self.base_path.parent - self._loop: asyncio.AbstractEventLoop | None = None self.protocol = "file" self.backend_type = "local" @@ -117,8 +120,8 @@ def _resolve_path(self, path: "str | Path") -> Path: p = Path(path) return p if p.is_absolute() else self.base_path / p - def read_bytes(self, path: "str | Path", **kwargs: Any) -> bytes: - """Read bytes from file.""" + def read_bytes_sync(self, path: "str | Path", **kwargs: Any) -> bytes: + """Read bytes from file synchronously.""" resolved = self._resolve_path(path) try: return execute_sync_storage_operation( @@ -127,8 +130,8 @@ def read_bytes(self, path: "str | Path", **kwargs: Any) -> bytes: except FileNotFoundInStorageError as error: raise FileNotFoundError(str(resolved)) from error - def write_bytes(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: - """Write bytes to file.""" + def write_bytes_sync(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: + """Write bytes to file synchronously.""" resolved = self._resolve_path(path) execute_sync_storage_operation( @@ -138,18 +141,18 @@ def write_bytes(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: path=str(resolved), ) - def read_text(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: - """Read text from file.""" - data = self.read_bytes(path, **kwargs) + def read_text_sync(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: + """Read text from file synchronously.""" + data = self.read_bytes_sync(path, **kwargs) return data.decode(encoding) - def write_text(self, path: "str | Path", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: - """Write text to file.""" + def write_text_sync(self, path: "str | Path", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: + """Write text to file synchronously.""" encoded = data.encode(encoding) - self.write_bytes(path, encoded, **kwargs) + self.write_bytes_sync(path, 
encoded, **kwargs) - def stream_read(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: - """Stream bytes from file.""" + def stream_read_sync(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: + """Stream bytes from file synchronously.""" resolved = self._resolve_path(path) chunk_size = chunk_size or 65536 try: @@ -162,18 +165,14 @@ def stream_read(self, path: "str | Path", chunk_size: "int | None" = None, **kwa except FileNotFoundError as error: raise FileNotFoundError(str(resolved)) from error - def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": - """List objects in directory. + def list_objects_sync(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": + """List objects in directory synchronously. Args: prefix: Optional prefix that may look like a directory or filename filter. recursive: Whether to walk subdirectories. **kwargs: Additional backend-specific options (currently unused). - Args: - prefix: Optional prefix that may look like a directory or filename filter. - recursive: Whether to walk subdirectories. - When the prefix resembles a directory (contains a slash or ends with '/'), we treat it as a path; otherwise we filter filenames within the base path. Paths outside base_path are returned with their absolute names. """ @@ -202,20 +201,20 @@ def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) return sorted(files) - def exists(self, path: "str | Path", **kwargs: Any) -> bool: - """Check if file exists.""" + def exists_sync(self, path: "str | Path", **kwargs: Any) -> bool: + """Check if file exists synchronously.""" return self._resolve_path(path).exists() - def delete(self, path: "str | Path", **kwargs: Any) -> None: - """Delete file or directory.""" + def delete_sync(self, path: "str | Path", **kwargs: Any) -> None: + """Delete file or directory synchronously.""" resolved = self._resolve_path(path) execute_sync_storage_operation( partial(_delete_local_path, resolved), backend=self.backend_type, operation="delete", path=str(resolved) ) - def copy(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: - """Copy file or directory.""" + def copy_sync(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: + """Copy file or directory synchronously.""" src = self._resolve_path(source) dst = self._resolve_path(destination) dst.parent.mkdir(parents=True, exist_ok=True) @@ -224,8 +223,8 @@ def copy(self, source: "str | Path", destination: "str | Path", **kwargs: Any) - partial(_copy_local_path, src, dst), backend=self.backend_type, operation="copy", path=f"{src}->{dst}" ) - def move(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: - """Move file or directory.""" + def move_sync(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: + """Move file or directory synchronously.""" src = self._resolve_path(source) dst = self._resolve_path(destination) dst.parent.mkdir(parents=True, exist_ok=True) @@ -233,8 +232,8 @@ def move(self, source: "str | Path", destination: "str | Path", **kwargs: Any) - partial(shutil.move, str(src), str(dst)), backend=self.backend_type, operation="move", path=f"{src}->{dst}" ) - def glob(self, pattern: str, **kwargs: Any) -> "list[str]": - """Find files matching pattern. + def glob_sync(self, pattern: str, **kwargs: Any) -> "list[str]": + """Find files matching pattern synchronously. 
Supports both relative and absolute patterns by adjusting where the glob search begins. """ @@ -256,8 +255,8 @@ def glob(self, pattern: str, **kwargs: Any) -> "list[str]": return sorted(results) - def get_metadata(self, path: "str | Path", **kwargs: Any) -> "dict[str, object]": - """Get file metadata.""" + def get_metadata_sync(self, path: "str | Path", **kwargs: Any) -> "dict[str, object]": + """Get file metadata synchronously.""" resolved = self._resolve_path(path) return execute_sync_storage_operation( partial(self._collect_metadata, resolved), @@ -280,16 +279,16 @@ def _collect_metadata(self, resolved: "Path") -> "dict[str, object]": "path": str(resolved), } - def is_object(self, path: "str | Path") -> bool: - """Check if path points to a file.""" + def is_object_sync(self, path: "str | Path") -> bool: + """Check if path points to a file synchronously.""" return self._resolve_path(path).is_file() - def is_path(self, path: "str | Path") -> bool: - """Check if path points to a directory.""" + def is_path_sync(self, path: "str | Path") -> bool: + """Check if path points to a directory synchronously.""" return self._resolve_path(path).is_dir() - def read_arrow(self, path: "str | Path", **kwargs: Any) -> "ArrowTable": - """Read Arrow table from file.""" + def read_arrow_sync(self, path: "str | Path", **kwargs: Any) -> "ArrowTable": + """Read Arrow table from file synchronously.""" pq = import_pyarrow_parquet() resolved = self._resolve_path(path) return cast( @@ -302,8 +301,8 @@ def read_arrow(self, path: "str | Path", **kwargs: Any) -> "ArrowTable": ), ) - def write_arrow(self, path: "str | Path", table: "ArrowTable", **kwargs: Any) -> None: - """Write Arrow table to file.""" + def write_arrow_sync(self, path: "str | Path", table: "ArrowTable", **kwargs: Any) -> None: + """Write Arrow table to file synchronously.""" pq = import_pyarrow_parquet() resolved = self._resolve_path(path) @@ -314,14 +313,14 @@ def write_arrow(self, path: "str | Path", table: "ArrowTable", **kwargs: Any) -> path=str(resolved), ) - def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator["ArrowRecordBatch"]: - """Stream Arrow record batches from files matching pattern. + def stream_arrow_sync(self, pattern: str, **kwargs: Any) -> Iterator["ArrowRecordBatch"]: + """Stream Arrow record batches from files matching pattern synchronously. Yields: Arrow record batches from matching files. 
""" pq = import_pyarrow_parquet() - files = self.glob(pattern) + files = self.glob_sync(pattern) for file_path in files: resolved = self._resolve_path(file_path) resolved_str = str(resolved) @@ -365,26 +364,26 @@ def sign_sync( async def read_bytes_async(self, path: "str | Path", **kwargs: Any) -> bytes: """Read bytes from file asynchronously.""" - return await async_(self.read_bytes)(path, **kwargs) + return await async_(self.read_bytes_sync)(path, **kwargs) async def write_bytes_async(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: """Write bytes to file asynchronously.""" - await async_(self.write_bytes)(path, data, **kwargs) + await async_(self.write_bytes_sync)(path, data, **kwargs) async def read_text_async(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: """Read text from file asynchronously.""" - return await async_(self.read_text)(path, encoding, **kwargs) + return await async_(self.read_text_sync)(path, encoding, **kwargs) async def write_text_async(self, path: "str | Path", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: """Write text to file asynchronously.""" - await async_(self.write_text)(path, data, encoding, **kwargs) + await async_(self.write_text_sync)(path, data, encoding, **kwargs) async def stream_read_async( self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any ) -> AsyncIterator[bytes]: """Stream bytes from file asynchronously. - Uses asyncio.to_thread() to run blocking file I/O in a thread pool, + Uses AsyncThreadedBytesIterator to offload blocking file I/O to a thread pool, ensuring the event loop is not blocked during read operations. Args: @@ -396,54 +395,60 @@ async def stream_read_async( AsyncIterator yielding chunks of bytes. """ resolved = self._resolve_path(path) - # Run blocking I/O in thread pool to avoid blocking event loop - data = await asyncio.to_thread(resolved.read_bytes) - return AsyncChunkedBytesIterator(data, chunk_size or 65536) + chunk_size = chunk_size or 65536 + # Open file in thread pool to avoid blocking, then use threaded iterator + file_obj = await async_(_open_file_for_read)(resolved) + return AsyncThreadedBytesIterator(file_obj, chunk_size) async def list_objects_async(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": """List objects asynchronously.""" - return await async_(self.list_objects)(prefix, recursive, **kwargs) + return await async_(self.list_objects_sync)(prefix, recursive, **kwargs) async def exists_async(self, path: "str | Path", **kwargs: Any) -> bool: """Check if file exists asynchronously.""" - return await async_(self.exists)(path, **kwargs) + return await async_(self.exists_sync)(path, **kwargs) async def delete_async(self, path: "str | Path", **kwargs: Any) -> None: """Delete file asynchronously.""" - await async_(self.delete)(path, **kwargs) + await async_(self.delete_sync)(path, **kwargs) async def copy_async(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: """Copy file asynchronously.""" - await async_(self.copy)(source, destination, **kwargs) + await async_(self.copy_sync)(source, destination, **kwargs) async def move_async(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: """Move file asynchronously.""" - await async_(self.move)(source, destination, **kwargs) + await async_(self.move_sync)(source, destination, **kwargs) async def get_metadata_async(self, path: "str | Path", **kwargs: Any) -> "dict[str, object]": """Get file metadata asynchronously.""" - 
return await async_(self.get_metadata)(path, **kwargs) + return await async_(self.get_metadata_sync)(path, **kwargs) async def read_arrow_async(self, path: "str | Path", **kwargs: Any) -> "ArrowTable": - """Read Arrow table asynchronously.""" - return self.read_arrow(path, **kwargs) + """Read Arrow table asynchronously. + + Uses async_() to offload blocking PyArrow I/O to thread pool. + """ + return await async_(self.read_arrow_sync)(path, **kwargs) async def write_arrow_async(self, path: "str | Path", table: "ArrowTable", **kwargs: Any) -> None: - """Write Arrow table asynchronously.""" - self.write_arrow(path, table, **kwargs) + """Write Arrow table asynchronously. + + Uses async_() to offload blocking PyArrow I/O to thread pool. + """ + await async_(self.write_arrow_sync)(path, table, **kwargs) def stream_arrow_async(self, pattern: str, **kwargs: Any) -> AsyncIterator["ArrowRecordBatch"]: """Stream Arrow record batches asynchronously. Args: pattern: Glob pattern to match files. - **kwargs: Additional arguments passed to stream_arrow(). + **kwargs: Additional arguments passed to stream_arrow_sync(). Returns: AsyncIterator yielding Arrow record batches. """ - - return AsyncArrowBatchIterator(self.stream_arrow(pattern, **kwargs)) + return AsyncArrowBatchIterator(self.stream_arrow_sync(pattern, **kwargs)) @overload async def sign_async(self, paths: str, expires_in: int = 3600, for_upload: bool = False) -> str: ... diff --git a/sqlspec/storage/backends/obstore.py b/sqlspec/storage/backends/obstore.py index 56b523c4..1843008a 100644 --- a/sqlspec/storage/backends/obstore.py +++ b/sqlspec/storage/backends/obstore.py @@ -24,6 +24,7 @@ from sqlspec.typing import ArrowRecordBatch, ArrowTable from sqlspec.utils.logging import get_logger, log_with_context from sqlspec.utils.module_loader import ensure_obstore +from sqlspec.utils.sync_tools import async_ __all__ = ("ObStoreBackend",) @@ -74,6 +75,8 @@ class ObStoreBackend: Implements ObjectStoreProtocol using obstore's Rust-based implementation for storage operations. Supports AWS S3, Google Cloud Storage, Azure Blob Storage, local filesystem, and HTTP endpoints. + + All synchronous methods use the *_sync suffix for consistency with async methods. 
""" __slots__ = ( @@ -196,8 +199,8 @@ def _resolve_path_for_local_store(self, path: "str | Path") -> str: return str(path) - def read_bytes(self, path: "str | Path", **kwargs: Any) -> bytes: # pyright: ignore[reportUnusedParameter] - """Read bytes using obstore.""" + def read_bytes_sync(self, path: "str | Path", **kwargs: Any) -> bytes: # pyright: ignore[reportUnusedParameter] + """Read bytes using obstore synchronously.""" resolved_path = self._resolve_path(path) result = execute_sync_storage_operation( @@ -215,8 +218,8 @@ def read_bytes(self, path: "str | Path", **kwargs: Any) -> bytes: # pyright: ig ) return result - def write_bytes(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] - """Write bytes using obstore.""" + def write_bytes_sync(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + """Write bytes using obstore synchronously.""" resolved_path = self._resolve_path(path) execute_sync_storage_operation( @@ -233,16 +236,16 @@ def write_bytes(self, path: "str | Path", data: bytes, **kwargs: Any) -> None: path=resolved_path, ) - def read_text(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: - """Read text using obstore.""" - return self.read_bytes(path, **kwargs).decode(encoding) + def read_text_sync(self, path: "str | Path", encoding: str = "utf-8", **kwargs: Any) -> str: + """Read text using obstore synchronously.""" + return self.read_bytes_sync(path, **kwargs).decode(encoding) - def write_text(self, path: "str | Path", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: - """Write text using obstore.""" - self.write_bytes(path, data.encode(encoding), **kwargs) + def write_text_sync(self, path: "str | Path", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: + """Write text using obstore synchronously.""" + self.write_bytes_sync(path, data.encode(encoding), **kwargs) - def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": # pyright: ignore[reportUnusedParameter] - """List objects using obstore.""" + def list_objects_sync(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> "list[str]": # pyright: ignore[reportUnusedParameter] + """List objects using obstore synchronously.""" # For LocalStore, the base_path is already included in the store root, # so we use empty prefix when none is given. For cloud stores, use base_path. 
if prefix: @@ -263,8 +266,8 @@ def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) ) return paths - def exists(self, path: "str | Path", **kwargs: Any) -> bool: # pyright: ignore[reportUnusedParameter] - """Check if object exists using obstore.""" + def exists_sync(self, path: "str | Path", **kwargs: Any) -> bool: # pyright: ignore[reportUnusedParameter] + """Check if object exists using obstore synchronously.""" try: resolved_path = self._resolve_path(path) self.store.head(resolved_path) # pyright: ignore[reportUnknownMemberType] @@ -288,8 +291,8 @@ def exists(self, path: "str | Path", **kwargs: Any) -> bool: # pyright: ignore[ ) return True - def delete(self, path: "str | Path", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] - """Delete object using obstore.""" + def delete_sync(self, path: "str | Path", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + """Delete object using obstore synchronously.""" resolved_path = self._resolve_path(path) execute_sync_storage_operation( partial(self.store.delete, resolved_path), backend=self.backend_type, operation="delete", path=resolved_path @@ -302,8 +305,8 @@ def delete(self, path: "str | Path", **kwargs: Any) -> None: # pyright: ignore[ path=resolved_path, ) - def copy(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] - """Copy object using obstore.""" + def copy_sync(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + """Copy object using obstore synchronously.""" source_path = self._resolve_path(source) dest_path = self._resolve_path(destination) execute_sync_storage_operation( @@ -321,8 +324,8 @@ def copy(self, source: "str | Path", destination: "str | Path", **kwargs: Any) - destination_path=dest_path, ) - def move(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] - """Move object using obstore.""" + def move_sync(self, source: "str | Path", destination: "str | Path", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + """Move object using obstore synchronously.""" source_path = self._resolve_path(source) dest_path = self._resolve_path(destination) execute_sync_storage_operation( @@ -340,14 +343,14 @@ def move(self, source: "str | Path", destination: "str | Path", **kwargs: Any) - destination_path=dest_path, ) - def glob(self, pattern: str, **kwargs: Any) -> "list[str]": - """Find objects matching pattern. + def glob_sync(self, pattern: str, **kwargs: Any) -> "list[str]": + """Find objects matching pattern synchronously. Lists all objects and filters them client-side using the pattern. 
""" resolved_pattern = resolve_storage_path(pattern, self.base_path, self.protocol, strip_file_scheme=True) - all_objects = self.list_objects(recursive=True, **kwargs) + all_objects = self.list_objects_sync(recursive=True, **kwargs) if "**" in pattern: matching_objects = [] @@ -377,8 +380,8 @@ def glob(self, pattern: str, **kwargs: Any) -> "list[str]": ) return results - def get_metadata(self, path: "str | Path", **kwargs: Any) -> "dict[str, object]": # pyright: ignore[reportUnusedParameter] - """Get object metadata using obstore.""" + def get_metadata_sync(self, path: "str | Path", **kwargs: Any) -> "dict[str, object]": # pyright: ignore[reportUnusedParameter] + """Get object metadata using obstore synchronously.""" resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=True) try: @@ -413,29 +416,29 @@ def get_metadata(self, path: "str | Path", **kwargs: Any) -> "dict[str, object]" return result - def is_object(self, path: "str | Path") -> bool: - """Check if path is an object using obstore.""" + def is_object_sync(self, path: "str | Path") -> bool: + """Check if path is an object using obstore synchronously.""" resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=True) - return self.exists(path) and not resolved_path.endswith("/") + return self.exists_sync(path) and not resolved_path.endswith("/") - def is_path(self, path: "str | Path") -> bool: - """Check if path is a prefix/directory using obstore.""" + def is_path_sync(self, path: "str | Path") -> bool: + """Check if path is a prefix/directory using obstore synchronously.""" resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=True) if resolved_path.endswith("/"): return True try: - objects = self.list_objects(prefix=str(path), recursive=True) + objects = self.list_objects_sync(prefix=str(path), recursive=True) return len(objects) > 0 except Exception: return False - def read_arrow(self, path: "str | Path", **kwargs: Any) -> ArrowTable: - """Read Arrow table using obstore.""" + def read_arrow_sync(self, path: "str | Path", **kwargs: Any) -> ArrowTable: + """Read Arrow table using obstore synchronously.""" pq = import_pyarrow_parquet() resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=True) - data = self.read_bytes(resolved_path) + data = self.read_bytes_sync(resolved_path) result = cast( "ArrowTable", execute_sync_storage_operation( @@ -454,8 +457,8 @@ def read_arrow(self, path: "str | Path", **kwargs: Any) -> ArrowTable: ) return result - def write_arrow(self, path: "str | Path", table: ArrowTable, **kwargs: Any) -> None: - """Write Arrow table using obstore.""" + def write_arrow_sync(self, path: "str | Path", table: ArrowTable, **kwargs: Any) -> None: + """Write Arrow table using obstore synchronously.""" pa = import_pyarrow() pq = import_pyarrow_parquet() resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=True) @@ -483,7 +486,7 @@ def write_arrow(self, path: "str | Path", table: ArrowTable, **kwargs: Any) -> N path=resolved_path, ) buffer.seek(0) - self.write_bytes(resolved_path, buffer.read()) + self.write_bytes_sync(resolved_path, buffer.read()) _log_storage_event( "storage.write", backend_type=self.backend_type, @@ -492,44 +495,51 @@ def write_arrow(self, path: "str | Path", table: ArrowTable, **kwargs: Any) -> N path=resolved_path, ) - def stream_read(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> 
Iterator[bytes]: - """Stream bytes using obstore. + def stream_read_sync(self, path: "str | Path", chunk_size: "int | None" = None, **kwargs: Any) -> Iterator[bytes]: + """Stream bytes using obstore's native streaming synchronously. - Note: - For remote backends, this currently performs a full read and yields chunks - as obstore's sync client doesn't expose a streaming iterator. - Use stream_read_async for true streaming. + Uses obstore's sync streaming iterator which yields chunks without + loading the entire file into memory, for both local and remote backends. """ resolved_path = self._resolve_path(path) - data = self.read_bytes(resolved_path) + chunk_size = chunk_size or 65536 - if chunk_size: - for i in range(0, len(data), chunk_size): - yield data[i : i + chunk_size] - else: - yield data + result = execute_sync_storage_operation( + partial(self.store.get, resolved_path), + backend=self.backend_type, + operation="stream_read", + path=resolved_path, + ) - def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecordBatch]: - """Stream Arrow record batches. + # Use obstore's native streaming - yields Buffer objects + # GetResult.stream(min_chunk_size) returns an iterator of chunks + for chunk in result.stream(min_chunk_size=chunk_size): + yield bytes(chunk) # Convert Buffer to bytes - Yields: - Iterator of Arrow record batches from matching objects. + def stream_arrow_sync(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecordBatch]: + """Stream Arrow record batches using obstore's native streaming synchronously. + + For each matching file, streams data through a buffered wrapper + that PyArrow can read directly without loading the entire file. """ pq = import_pyarrow_parquet() - for obj_path in self.glob(pattern, **kwargs): + for obj_path in self.glob_sync(pattern, **kwargs): resolved_path = resolve_storage_path(obj_path, self.base_path, self.protocol, strip_file_scheme=True) result = execute_sync_storage_operation( partial(self.store.get, resolved_path), backend=self.backend_type, - operation="stream_read", + operation="stream_arrow", path=resolved_path, ) - bytes_obj = result.bytes() - data = bytes_obj.to_bytes() - buffer = io.BytesIO(data) - parquet_file = execute_sync_storage_operation( - partial(pq.ParquetFile, buffer), backend=self.backend_type, operation="stream_arrow", path=resolved_path - ) + + # Create a file-like object that streams from obstore + # PyArrow's ParquetFile needs a seekable file, so we buffer the stream + buffer = io.BytesIO() + for chunk in result.stream(): + buffer.write(chunk) + buffer.seek(0) + + parquet_file = pq.ParquetFile(buffer) yield from parquet_file.iter_batches() @property @@ -814,11 +824,17 @@ async def get_metadata_async(self, path: "str | Path", **kwargs: Any) -> "dict[s return result async def read_arrow_async(self, path: "str | Path", **kwargs: Any) -> ArrowTable: - """Read Arrow table from storage asynchronously.""" + """Read Arrow table from storage asynchronously. + + Uses async_() with storage limiter to offload blocking PyArrow I/O to thread pool. 
+ """ pq = import_pyarrow_parquet() resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=True) data = await self.read_bytes_async(resolved_path) - result = cast("ArrowTable", pq.read_table(io.BytesIO(data), **kwargs)) + + # Offload PyArrow parsing to thread pool + result = await async_(pq.read_table)(io.BytesIO(data), **kwargs) + _log_storage_event( "storage.read", backend_type=self.backend_type, @@ -827,16 +843,26 @@ async def read_arrow_async(self, path: "str | Path", **kwargs: Any) -> ArrowTabl mode="async", path=resolved_path, ) - return result + return cast("ArrowTable", result) async def write_arrow_async(self, path: "str | Path", table: ArrowTable, **kwargs: Any) -> None: - """Write Arrow table to storage asynchronously.""" + """Write Arrow table to storage asynchronously. + + Uses async_() with storage limiter to offload blocking PyArrow serialization + to thread pool, preventing event loop blocking. + """ pq = import_pyarrow_parquet() resolved_path = resolve_storage_path(path, self.base_path, self.protocol, strip_file_scheme=True) - buffer = io.BytesIO() - pq.write_table(table, buffer, **kwargs) - buffer.seek(0) - await self.write_bytes_async(resolved_path, buffer.read()) + + def _serialize() -> bytes: + buffer = io.BytesIO() + pq.write_table(table, buffer, **kwargs) + buffer.seek(0) + return buffer.read() + + data = await async_(_serialize)() + await self.write_bytes_async(resolved_path, data) + _log_storage_event( "storage.write", backend_type=self.backend_type, @@ -851,13 +877,13 @@ def stream_arrow_async(self, pattern: str, **kwargs: Any) -> AsyncIterator["Arro Args: pattern: Glob pattern to match files. - **kwargs: Additional arguments passed to stream_arrow(). + **kwargs: Additional arguments passed to stream_arrow_sync(). Returns: AsyncIterator yielding Arrow record batches. """ resolved_pattern = resolve_storage_path(pattern, self.base_path, self.protocol, strip_file_scheme=True) - return AsyncArrowBatchIterator(self.stream_arrow(resolved_pattern, **kwargs)) + return AsyncArrowBatchIterator(self.stream_arrow_sync(resolved_pattern, **kwargs)) @overload async def sign_async(self, paths: str, expires_in: int = 3600, for_upload: bool = False) -> str: ... 
diff --git a/sqlspec/storage/pipeline.py b/sqlspec/storage/pipeline.py index 8813c827..3bdf6c5d 100644 --- a/sqlspec/storage/pipeline.py +++ b/sqlspec/storage/pipeline.py @@ -223,18 +223,20 @@ def _encode_arrow_payload(table: "ArrowTable", format_choice: StorageFormat, *, def _delete_backend_sync(backend: "ObjectStoreProtocol", path: str, *, backend_name: str) -> None: - execute_sync_storage_operation(partial(backend.delete, path), backend=backend_name, operation="delete", path=path) + execute_sync_storage_operation( + partial(backend.delete_sync, path), backend=backend_name, operation="delete", path=path + ) def _write_backend_sync(backend: "ObjectStoreProtocol", path: str, payload: bytes, *, backend_name: str) -> None: execute_sync_storage_operation( - partial(backend.write_bytes, path, payload), backend=backend_name, operation="write_bytes", path=path + partial(backend.write_bytes_sync, path, payload), backend=backend_name, operation="write_bytes", path=path ) def _read_backend_sync(backend: "ObjectStoreProtocol", path: str, *, backend_name: str) -> bytes: return execute_sync_storage_operation( - partial(backend.read_bytes, path), backend=backend_name, operation="read_bytes", path=path + partial(backend.read_bytes_sync, path), backend=backend_name, operation="read_bytes", path=path ) @@ -384,7 +386,7 @@ def stream_read( ) -> "Iterator[bytes]": """Stream bytes from an artifact.""" backend, path = self._resolve_backend(source, storage_options) - return backend.stream_read(path, chunk_size=chunk_size) + return backend.stream_read_sync(path, chunk_size=chunk_size) def allocate_staging_artifacts(self, requests: "list[StorageLoadRequest]") -> "list[StagedArtifact]": """Allocate staging metadata for upcoming loads.""" diff --git a/sqlspec/utils/fixtures.py b/sqlspec/utils/fixtures.py index ecead1c2..9634bfcf 100644 --- a/sqlspec/utils/fixtures.py +++ b/sqlspec/utils/fixtures.py @@ -206,10 +206,10 @@ def write_fixture( if compress: file_path = f"{table_name}.json.gz" content = gzip.compress(json_content.encode("utf-8")) - storage.write_bytes(file_path, content) + storage.write_bytes_sync(file_path, content) else: file_path = f"{table_name}.json" - storage.write_text(file_path, json_content) + storage.write_text_sync(file_path, json_content) async def write_fixture_async( diff --git a/sqlspec/utils/sync_tools.py b/sqlspec/utils/sync_tools.py index d2ffbe70..8d6bb8f4 100644 --- a/sqlspec/utils/sync_tools.py +++ b/sqlspec/utils/sync_tools.py @@ -247,9 +247,10 @@ def __init__(self, function: "Callable[ParamSpecT, ReturnT]", limiter: "Capacity async def __call__(self, *args: "ParamSpecT.args", **kwargs: "ParamSpecT.kwargs") -> "ReturnT": partial_f = functools.partial(self._function, *args, **kwargs) - used_limiter = self._limiter or _default_limiter - async with used_limiter: - return await asyncio.to_thread(partial_f) + if self._limiter is not None: + async with self._limiter: + return await asyncio.to_thread(partial_f) + return await asyncio.to_thread(partial_f) class _EnsureAsyncWrapper(Generic[ParamSpecT, ReturnT]): diff --git a/tests/integration/storage/test_integration.py b/tests/integration/storage/test_integration.py index ecb4ab2d..6ec9899f 100644 --- a/tests/integration/storage/test_integration.py +++ b/tests/integration/storage/test_integration.py @@ -105,14 +105,14 @@ def test_local_store_file_operations(local_test_setup: Path) -> None: store = LocalStore(str(local_test_setup)) # Test exists - assert store.exists("test.txt") - assert not store.exists("nonexistent.txt") + assert 
store.exists_sync("test.txt") + assert not store.exists_sync("nonexistent.txt") # Test read operations - text_content = store.read_text("test.txt") + text_content = store.read_text_sync("test.txt") assert text_content == TEST_TEXT_CONTENT - binary_content = store.read_bytes("test.bin") + binary_content = store.read_bytes_sync("test.bin") assert binary_content == TEST_BINARY_CONTENT @@ -125,13 +125,13 @@ def test_local_store_write_operations(local_test_setup: Path) -> None: # Test write text new_text = "New text content" - store.write_text("new.txt", new_text) - assert store.read_text("new.txt") == new_text + store.write_text_sync("new.txt", new_text) + assert store.read_text_sync("new.txt") == new_text # Test write bytes new_bytes = b"New binary content" - store.write_bytes("new.bin", new_bytes) - assert store.read_bytes("new.bin") == new_bytes + store.write_bytes_sync("new.bin", new_bytes) + assert store.read_bytes_sync("new.bin") == new_bytes @pytest.mark.xdist_group("storage") @@ -142,7 +142,7 @@ def test_local_store_listing_operations(local_test_setup: Path) -> None: store = LocalStore(str(local_test_setup)) # Test list_objects - objects = store.list_objects() + objects = store.list_objects_sync() assert "test.txt" in objects assert "test.bin" in objects assert "subdir/nested.txt" in objects @@ -218,14 +218,14 @@ def test_fsspec_s3_basic_operations( # Test write and read text test_path = "integration_test/test.txt" - fsspec_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + fsspec_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) - content = fsspec_s3_backend.read_text(test_path) + content = fsspec_s3_backend.read_text_sync(test_path) assert content == TEST_TEXT_CONTENT # Test exists - assert fsspec_s3_backend.exists(test_path) - assert not fsspec_s3_backend.exists("nonexistent.txt") + assert fsspec_s3_backend.exists_sync(test_path) + assert not fsspec_s3_backend.exists_sync("nonexistent.txt") @pytest.mark.xdist_group("storage") @@ -233,9 +233,9 @@ def test_fsspec_s3_basic_operations( def test_fsspec_s3_binary_operations(fsspec_s3_backend: "ObjectStoreProtocol") -> None: """Test FSSpec S3 backend binary operations.""" test_path = "integration_test/binary.bin" - fsspec_s3_backend.write_bytes(test_path, TEST_BINARY_CONTENT) + fsspec_s3_backend.write_bytes_sync(test_path, TEST_BINARY_CONTENT) - content = fsspec_s3_backend.read_bytes(test_path) + content = fsspec_s3_backend.read_bytes_sync(test_path) assert content == TEST_BINARY_CONTENT @@ -262,10 +262,10 @@ def test_fsspec_s3_listing_operations(fsspec_s3_backend: "ObjectStoreProtocol") # Write multiple test files test_files = ["list_test/file1.txt", "list_test/file2.txt", "list_test/subdir/file3.txt"] for file_path in test_files: - fsspec_s3_backend.write_text(file_path, f"Content of {file_path}") + fsspec_s3_backend.write_text_sync(file_path, f"Content of {file_path}") # Test list_objects - objects = fsspec_s3_backend.list_objects("list_test/") + objects = fsspec_s3_backend.list_objects_sync("list_test/") assert len(objects) >= 3 assert any("file1.txt" in obj for obj in objects) assert any("file2.txt" in obj for obj in objects) @@ -282,20 +282,20 @@ def test_fsspec_s3_copy_move_operations(fsspec_s3_backend: "ObjectStoreProtocol" move_source_path = "move_test/source.txt" move_dest_path = "move_test/moved.txt" - fsspec_s3_backend.write_text(source_path, TEST_TEXT_CONTENT) - fsspec_s3_backend.write_text(move_source_path, TEST_TEXT_CONTENT) + fsspec_s3_backend.write_text_sync(source_path, TEST_TEXT_CONTENT) + 
fsspec_s3_backend.write_text_sync(move_source_path, TEST_TEXT_CONTENT) # Test copy - fsspec_s3_backend.copy(source_path, copy_path) - assert fsspec_s3_backend.exists(source_path) # Original should still exist - assert fsspec_s3_backend.exists(copy_path) - assert fsspec_s3_backend.read_text(copy_path) == TEST_TEXT_CONTENT + fsspec_s3_backend.copy_sync(source_path, copy_path) + assert fsspec_s3_backend.exists_sync(source_path) # Original should still exist + assert fsspec_s3_backend.exists_sync(copy_path) + assert fsspec_s3_backend.read_text_sync(copy_path) == TEST_TEXT_CONTENT # Test move - fsspec_s3_backend.move(move_source_path, move_dest_path) - assert not fsspec_s3_backend.exists(move_source_path) # Original should be gone - assert fsspec_s3_backend.exists(move_dest_path) - assert fsspec_s3_backend.read_text(move_dest_path) == TEST_TEXT_CONTENT + fsspec_s3_backend.move_sync(move_source_path, move_dest_path) + assert not fsspec_s3_backend.exists_sync(move_source_path) # Original should be gone + assert fsspec_s3_backend.exists_sync(move_dest_path) + assert fsspec_s3_backend.read_text_sync(move_dest_path) == TEST_TEXT_CONTENT # ObStore S3 backend tests @@ -315,12 +315,12 @@ def test_obstore_s3_basic_operations( test_path = "integration_test/obstore_test.txt" # Test write and read - obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) - content = obstore_s3_backend.read_text(test_path) + obstore_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) + content = obstore_s3_backend.read_text_sync(test_path) assert content == TEST_TEXT_CONTENT # Test exists - assert obstore_s3_backend.exists(test_path) + assert obstore_s3_backend.exists_sync(test_path) @pytest.mark.xdist_group("storage") @@ -329,8 +329,8 @@ def test_obstore_s3_binary_operations(obstore_s3_backend: "ObjectStoreProtocol") """Test ObStore S3 backend binary operations.""" test_path = "integration_test/obstore_binary.bin" - obstore_s3_backend.write_bytes(test_path, TEST_BINARY_CONTENT) - content = obstore_s3_backend.read_bytes(test_path) + obstore_s3_backend.write_bytes_sync(test_path, TEST_BINARY_CONTENT) + content = obstore_s3_backend.read_bytes_sync(test_path) assert content == TEST_BINARY_CONTENT @@ -356,10 +356,10 @@ def test_obstore_s3_listing_operations(obstore_s3_backend: "ObjectStoreProtocol" # Write test files in different paths test_files = ["obstore_list/file1.txt", "obstore_list/file2.txt", "obstore_list/subdir/file3.txt"] for file_path in test_files: - obstore_s3_backend.write_text(file_path, f"ObStore content of {file_path}") + obstore_s3_backend.write_text_sync(file_path, f"ObStore content of {file_path}") # Test list_objects - objects = obstore_s3_backend.list_objects("obstore_list/") + objects = obstore_s3_backend.list_objects_sync("obstore_list/") assert len(objects) >= 3 assert any("file1.txt" in obj for obj in objects) assert any("file2.txt" in obj for obj in objects) @@ -384,7 +384,7 @@ def test_registry_uri_resolution_local(tmp_path: "Path") -> None: # Registry prefers obstore for file:// URIs when available, otherwise LocalStore assert isinstance(backend, (ObStoreBackend, LocalStore)) - content = backend.read_text("registry_test.txt") + content = backend.read_text_sync("registry_test.txt") assert content == TEST_TEXT_CONTENT @@ -402,7 +402,7 @@ def test_registry_path_resolution(tmp_path: "Path") -> None: # Registry prefers obstore for local paths when available, otherwise LocalStore assert isinstance(backend, (ObStoreBackend, LocalStore)) - content = backend.read_text("path_test.txt") + content = 
backend.read_text_sync("path_test.txt") assert content == TEST_TEXT_CONTENT @@ -432,8 +432,8 @@ def test_registry_s3_fsspec_resolution( # Test basic operations test_path = "registry_fsspec_test.txt" - backend.write_text(test_path, TEST_TEXT_CONTENT) - content = backend.read_text(test_path) + backend.write_text_sync(test_path, TEST_TEXT_CONTENT) + content = backend.read_text_sync(test_path) assert content == TEST_TEXT_CONTENT @@ -459,8 +459,8 @@ def test_registry_alias_registration( assert isinstance(backend, (ObStoreBackend, LocalStore)) # Create test data - backend.write_text("alias_test.txt", TEST_TEXT_CONTENT) - content = backend.read_text("alias_test.txt") + backend.write_text_sync("alias_test.txt", TEST_TEXT_CONTENT) + content = backend.read_text_sync("alias_test.txt") assert content == TEST_TEXT_CONTENT # Register S3 alias if fsspec available @@ -482,8 +482,8 @@ def test_registry_alias_registration( assert isinstance(s3_backend, FSSpecBackend) # Test S3 alias operations - s3_backend.write_text("s3_alias_test.txt", TEST_TEXT_CONTENT) - s3_content = s3_backend.read_text("s3_alias_test.txt") + s3_backend.write_text_sync("s3_alias_test.txt", TEST_TEXT_CONTENT) + s3_content = s3_backend.read_text_sync("s3_alias_test.txt") assert s3_content == TEST_TEXT_CONTENT finally: @@ -557,12 +557,12 @@ def test_backend_consistency(request: pytest.FixtureRequest, backend_name: str) test_path = f"consistency_test_{backend_name}.txt" # Test write/read consistency - backend.write_text(test_path, TEST_TEXT_CONTENT) - content = backend.read_text(test_path) + backend.write_text_sync(test_path, TEST_TEXT_CONTENT) + content = backend.read_text_sync(test_path) assert content == TEST_TEXT_CONTENT # Test exists consistency - assert backend.exists(test_path) + assert backend.exists_sync(test_path) # Test URL signing consistency (only for backends that support signing) if backend.supports_signing: @@ -606,10 +606,10 @@ def test_local_backend_error_handling(tmp_path: "Path") -> None: # Test reading nonexistent file with pytest.raises(FileNotFoundError): - backend.read_text("nonexistent.txt") + backend.read_text_sync("nonexistent.txt") with pytest.raises(FileNotFoundError): - backend.read_bytes("nonexistent.txt") + backend.read_bytes_sync("nonexistent.txt") @pytest.mark.xdist_group("storage") @@ -633,7 +633,7 @@ def test_fsspec_s3_error_handling( # Test reading nonexistent file with pytest.raises(FileNotFoundInStorageError): - backend.read_text("nonexistent.txt") + backend.read_text_sync("nonexistent.txt") @pytest.mark.xdist_group("storage") @@ -769,13 +769,13 @@ def test_local_arrow_operations(tmp_path: "Path") -> None: # Test write/read Arrow table arrow_path = "arrow_test.parquet" - backend.write_arrow(arrow_path, table) + backend.write_arrow_sync(arrow_path, table) - read_table = backend.read_arrow(arrow_path) + read_table = backend.read_arrow_sync(arrow_path) assert read_table.equals(table) # Test exists for Arrow file - assert backend.exists(arrow_path) + assert backend.exists_sync(arrow_path) @pytest.mark.xdist_group("storage") @@ -811,9 +811,9 @@ def test_fsspec_s3_arrow_operations( # Test S3 Arrow operations s3_arrow_path = "s3_arrow_test.parquet" - backend.write_arrow(s3_arrow_path, table) + backend.write_arrow_sync(s3_arrow_path, table) - read_table = backend.read_arrow(s3_arrow_path) + read_table = backend.read_arrow_sync(s3_arrow_path) assert read_table.equals(table) @@ -833,15 +833,15 @@ def test_local_backend_large_file_operations(tmp_path: "Path") -> None: # Test large text operations large_text_path 
= "large_test.txt" - backend.write_text(large_text_path, large_text) - read_content = backend.read_text(large_text_path) + backend.write_text_sync(large_text_path, large_text) + read_content = backend.read_text_sync(large_text_path) assert read_content == large_text assert len(read_content) == len(large_text) # Test large binary operations large_binary_path = "large_test.bin" - backend.write_bytes(large_binary_path, large_binary) - read_binary = backend.read_bytes(large_binary_path) + backend.write_bytes_sync(large_binary_path, large_binary) + read_binary = backend.read_bytes_sync(large_binary_path) assert read_binary == large_binary assert len(read_binary) == len(large_binary) @@ -871,7 +871,7 @@ async def write_test_file(index: int) -> None: # Verify all files exist for i in range(10): - assert backend.exists(f"concurrent_test_{i}.txt") + assert backend.exists_sync(f"concurrent_test_{i}.txt") # Metadata tests @@ -886,19 +886,19 @@ def test_local_metadata_operations(tmp_path: "Path") -> None: # Create test file test_path = "metadata_test.txt" - backend.write_text(test_path, TEST_TEXT_CONTENT) + backend.write_text_sync(test_path, TEST_TEXT_CONTENT) # Test metadata retrieval - metadata = backend.get_metadata(test_path) + metadata = backend.get_metadata_sync(test_path) assert metadata is not None assert "size" in metadata assert metadata["size"] == len(TEST_TEXT_CONTENT.encode()) # Test metadata for binary file binary_path = "metadata_binary.bin" - backend.write_bytes(binary_path, TEST_BINARY_CONTENT) + backend.write_bytes_sync(binary_path, TEST_BINARY_CONTENT) - binary_metadata = backend.get_metadata(binary_path) + binary_metadata = backend.get_metadata_sync(binary_path) assert binary_metadata is not None assert binary_metadata["size"] == len(TEST_BINARY_CONTENT) @@ -924,8 +924,8 @@ def test_fsspec_s3_metadata_operations( # Test S3 metadata test_path = "s3_metadata_test.txt" - backend.write_text(test_path, TEST_TEXT_CONTENT) + backend.write_text_sync(test_path, TEST_TEXT_CONTENT) - metadata = backend.get_metadata(test_path) + metadata = backend.get_metadata_sync(test_path) assert metadata is not None assert "size" in metadata diff --git a/tests/integration/storage/test_signing.py b/tests/integration/storage/test_signing.py index 4ff05788..655d1fb8 100644 --- a/tests/integration/storage/test_signing.py +++ b/tests/integration/storage/test_signing.py @@ -52,7 +52,7 @@ def test_obstore_s3_supports_signing(obstore_s3_backend: "ObjectStoreProtocol") def test_obstore_s3_sign_sync_single_path_returns_string(obstore_s3_backend: "ObjectStoreProtocol") -> None: """Test sign_sync with single path returns a string URL.""" test_path = "signing_test/single_path.txt" - obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) signed_url = obstore_s3_backend.sign_sync(test_path) @@ -67,7 +67,7 @@ def test_obstore_s3_sign_sync_list_paths_returns_list(obstore_s3_backend: "Objec """Test sign_sync with list of paths returns list of URLs.""" test_paths = ["signing_test/list_path1.txt", "signing_test/list_path2.txt"] for path in test_paths: - obstore_s3_backend.write_text(path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(path, TEST_TEXT_CONTENT) signed_urls = obstore_s3_backend.sign_sync(test_paths) @@ -93,7 +93,7 @@ def test_obstore_s3_sign_sync_empty_list_returns_empty_list(obstore_s3_backend: def test_obstore_s3_sign_sync_with_custom_expires_in(obstore_s3_backend: "ObjectStoreProtocol") -> None: """Test sign_sync with custom 
expiration time.""" test_path = "signing_test/custom_expires.txt" - obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) signed_url = obstore_s3_backend.sign_sync(test_path, expires_in=7200) @@ -118,7 +118,7 @@ def test_obstore_s3_sign_sync_for_upload(obstore_s3_backend: "ObjectStoreProtoco def test_obstore_s3_sign_sync_max_expires_validation(obstore_s3_backend: "ObjectStoreProtocol") -> None: """Test sign_sync raises ValueError when expires_in exceeds maximum.""" test_path = "signing_test/max_expires.txt" - obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) max_expires = 604800 # 7 days with pytest.raises(ValueError, match="exceed"): @@ -191,7 +191,7 @@ async def test_obstore_s3_sign_async_max_expires_validation(obstore_s3_backend: def test_obstore_s3_signed_url_contains_signature_params(obstore_s3_backend: "ObjectStoreProtocol") -> None: """Test that signed URL contains AWS signature parameters.""" test_path = "signing_test/sig_params.txt" - obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) signed_url = obstore_s3_backend.sign_sync(test_path) @@ -204,7 +204,7 @@ def test_obstore_s3_different_paths_produce_different_urls(obstore_s3_backend: " """Test that different paths produce different signed URLs.""" paths = ["signing_test/path_a.txt", "signing_test/path_b.txt"] for path in paths: - obstore_s3_backend.write_text(path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(path, TEST_TEXT_CONTENT) url_a = obstore_s3_backend.sign_sync(paths[0]) url_b = obstore_s3_backend.sign_sync(paths[1]) @@ -220,7 +220,7 @@ def test_obstore_s3_sign_preserves_path_order_in_list(obstore_s3_backend: "Objec """Test that signed URLs preserve order of input paths.""" paths = [f"signing_test/order_{i}.txt" for i in range(5)] for path in paths: - obstore_s3_backend.write_text(path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(path, TEST_TEXT_CONTENT) signed_urls = obstore_s3_backend.sign_sync(paths) @@ -233,7 +233,7 @@ def test_obstore_s3_sign_preserves_path_order_in_list(obstore_s3_backend: "Objec def test_obstore_s3_sign_with_special_characters_in_path(obstore_s3_backend: "ObjectStoreProtocol") -> None: """Test signing paths with special characters.""" test_path = "signing_test/file with spaces.txt" - obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) signed_url = obstore_s3_backend.sign_sync(test_path) @@ -246,7 +246,7 @@ def test_obstore_s3_sign_with_special_characters_in_path(obstore_s3_backend: "Ob def test_obstore_s3_sign_with_nested_path(obstore_s3_backend: "ObjectStoreProtocol") -> None: """Test signing deeply nested paths.""" test_path = "signing_test/level1/level2/level3/deep_file.txt" - obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + obstore_s3_backend.write_text_sync(test_path, TEST_TEXT_CONTENT) signed_url = obstore_s3_backend.sign_sync(test_path) diff --git a/tests/unit/storage/test_bridge.py b/tests/unit/storage/test_bridge.py index 90b27701..219a7f4d 100644 --- a/tests/unit/storage/test_bridge.py +++ b/tests/unit/storage/test_bridge.py @@ -337,7 +337,7 @@ class _Backend: def __init__(self) -> None: self.payloads: list[tuple[str, bytes]] = [] - def write_bytes(self, path: str, payload: bytes) -> None: + def write_bytes_sync(self, path: str, payload: bytes) -> None: 
self.payloads.append((path, payload)) backend = _Backend() diff --git a/tests/unit/storage/test_fsspec_backend.py b/tests/unit/storage/test_fsspec_backend.py index 8ad2fd98..3f5bb7dc 100644 --- a/tests/unit/storage/test_fsspec_backend.py +++ b/tests/unit/storage/test_fsspec_backend.py @@ -49,8 +49,8 @@ def test_write_and_read_bytes(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) test_data = b"test data content" - store.write_bytes("test_file.bin", test_data) - result = store.read_bytes("test_file.bin") + store.write_bytes_sync("test_file.bin", test_data) + result = store.read_bytes_sync("test_file.bin") assert result == test_data @@ -63,8 +63,8 @@ def test_write_and_read_text(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) test_text = "test text content\nwith multiple lines" - store.write_text("test_file.txt", test_text) - result = store.read_text("test_file.txt") + store.write_text_sync("test_file.txt", test_text) + result = store.read_text_sync("test_file.txt") assert result == test_text @@ -76,10 +76,10 @@ def test_exists(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) - assert not store.exists("nonexistent.txt") + assert not store.exists_sync("nonexistent.txt") - store.write_text("existing.txt", "content") - assert store.exists("existing.txt") + store.write_text_sync("existing.txt", "content") + assert store.exists_sync("existing.txt") @pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec missing") @@ -89,11 +89,11 @@ def test_delete(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) - store.write_text("to_delete.txt", "content") - assert store.exists("to_delete.txt") + store.write_text_sync("to_delete.txt", "content") + assert store.exists_sync("to_delete.txt") - store.delete("to_delete.txt") - assert not store.exists("to_delete.txt") + store.delete_sync("to_delete.txt") + assert not store.exists_sync("to_delete.txt") @pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec missing") @@ -104,11 +104,11 @@ def test_copy(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) original_content = "original content" - store.write_text("original.txt", original_content) - store.copy("original.txt", "copied.txt") + store.write_text_sync("original.txt", original_content) + store.copy_sync("original.txt", "copied.txt") - assert store.exists("copied.txt") - assert store.read_text("copied.txt") == original_content + assert store.exists_sync("copied.txt") + assert store.read_text_sync("copied.txt") == original_content @pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec missing") @@ -119,12 +119,12 @@ def test_move(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) original_content = "content to move" - store.write_text("original.txt", original_content) - store.move("original.txt", "moved.txt") + store.write_text_sync("original.txt", original_content) + store.move_sync("original.txt", "moved.txt") - assert not store.exists("original.txt") - assert store.exists("moved.txt") - assert store.read_text("moved.txt") == original_content + assert not store.exists_sync("original.txt") + assert store.exists_sync("moved.txt") + assert store.read_text_sync("moved.txt") == original_content @pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec missing") @@ -135,12 +135,12 @@ def test_list_objects(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) # Create test files - store.write_text("file1.txt", 
"content1") - store.write_text("file2.txt", "content2") - store.write_text("subdir/file3.txt", "content3") + store.write_text_sync("file1.txt", "content1") + store.write_text_sync("file2.txt", "content2") + store.write_text_sync("subdir/file3.txt", "content3") # List all objects - all_objects = store.list_objects() + all_objects = store.list_objects_sync() assert any("file1.txt" in obj for obj in all_objects) assert any("file2.txt" in obj for obj in all_objects) assert any("file3.txt" in obj for obj in all_objects) @@ -154,12 +154,12 @@ def test_glob(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) # Create test files - store.write_text("test1.sql", "SELECT 1") - store.write_text("test2.sql", "SELECT 2") - store.write_text("config.json", "{}") + store.write_text_sync("test1.sql", "SELECT 1") + store.write_text_sync("test2.sql", "SELECT 2") + store.write_text_sync("config.json", "{}") # Test glob patterns - sql_files = store.glob("*.sql") + sql_files = store.glob_sync("*.sql") assert any("test1.sql" in obj for obj in sql_files) assert any("test2.sql" in obj for obj in sql_files) assert not any("config.json" in obj for obj in sql_files) @@ -173,8 +173,8 @@ def test_get_metadata(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) test_content = "test content for metadata" - store.write_text("test_file.txt", test_content) - metadata = store.get_metadata("test_file.txt") + store.write_text_sync("test_file.txt", test_content) + metadata = store.get_metadata_sync("test_file.txt") assert "size" in metadata assert "exists" in metadata @@ -188,13 +188,13 @@ def test_is_object_and_is_path(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) - store.write_text("file.txt", "content") + store.write_text_sync("file.txt", "content") (tmp_path / "subdir").mkdir() - assert store.is_object("file.txt") - assert not store.is_object("subdir") - assert not store.is_path("file.txt") - assert store.is_path("subdir") + assert store.is_object_sync("file.txt") + assert not store.is_object_sync("subdir") + assert not store.is_path_sync("file.txt") + assert store.is_path_sync("subdir") @pytest.mark.skipif(not FSSPEC_INSTALLED or not PYARROW_INSTALLED, reason="fsspec or PyArrow missing") @@ -210,8 +210,8 @@ def test_write_and_read_arrow(tmp_path: Path) -> None: data: dict[str, Any] = {"id": [1, 2, 3], "name": ["Alice", "Bob", "Charlie"], "score": [95.5, 87.0, 92.3]} table = pa.table(data) - store.write_arrow("test_data.parquet", table) - result = store.read_arrow("test_data.parquet") + store.write_arrow_sync("test_data.parquet", table) + result = store.read_arrow_sync("test_data.parquet") assert result.equals(table) @@ -229,10 +229,10 @@ def test_stream_arrow(tmp_path: Path) -> None: data: dict[str, Any] = {"id": [1, 2, 3, 4, 5], "value": ["a", "b", "c", "d", "e"]} table = pa.table(data) - store.write_arrow("stream_test.parquet", table) + store.write_arrow_sync("stream_test.parquet", table) # Stream record batches - batches = list(store.stream_arrow("stream_test.parquet")) + batches = list(store.stream_arrow_sync("stream_test.parquet")) assert len(batches) > 0 # Verify we can read the data @@ -247,7 +247,7 @@ def test_sign_returns_uri(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") # FSSpec backends do not support URL signing assert store.supports_signing is False @@ -472,13 +472,13 @@ def 
test_arrow_operations_without_pyarrow(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) with pytest.raises(MissingDependencyError, match="pyarrow"): - store.read_arrow("test.parquet") + store.read_arrow_sync("test.parquet") with pytest.raises(MissingDependencyError, match="pyarrow"): - store.write_arrow("test.parquet", None) # type: ignore + store.write_arrow_sync("test.parquet", None) # type: ignore with pytest.raises(MissingDependencyError, match="pyarrow"): - list(store.stream_arrow("*.parquet")) + list(store.stream_arrow_sync("*.parquet")) # Tests for file:// URI auto-derive base_path fix @@ -520,8 +520,8 @@ def test_file_uri_base_path_full_workflow(tmp_path: Path) -> None: # Write and read should work correctly test_data = b"test content" - store.write_bytes("test.bin", test_data) - result = store.read_bytes("test.bin") + store.write_bytes_sync("test.bin", test_data) + result = store.read_bytes_sync("test.bin") assert result == test_data # Verify file is in the correct location @@ -542,7 +542,7 @@ async def test_stream_read_async_does_not_block_event_loop(tmp_path: Path) -> No # Write a reasonably sized file test_data = b"x" * 100_000 - store.write_bytes("large_file.bin", test_data) + store.write_bytes_sync("large_file.bin", test_data) # Track if concurrent task runs during streaming concurrent_task_ran = False @@ -571,7 +571,7 @@ async def test_stream_read_async_respects_chunk_size(tmp_path: Path) -> None: store = FSSpecBackend("file", base_path=str(tmp_path)) test_data = b"x" * 10_000 - store.write_bytes("chunked_file.bin", test_data) + store.write_bytes_sync("chunked_file.bin", test_data) chunk_size = 1000 chunks = [chunk async for chunk in await store.stream_read_async("chunked_file.bin", chunk_size=chunk_size)] @@ -595,7 +595,7 @@ async def test_stream_read_async_with_file_uri_base_path(tmp_path: Path) -> None store = FSSpecBackend(f"file://{tmp_path}", base_path="data") test_data = b"streaming test data" - store.write_bytes("stream_test.bin", test_data) + store.write_bytes_sync("stream_test.bin", test_data) chunks = [chunk async for chunk in await store.stream_read_async("stream_test.bin")] diff --git a/tests/unit/storage/test_local_store.py b/tests/unit/storage/test_local_store.py index 1e1f8a3a..04552416 100644 --- a/tests/unit/storage/test_local_store.py +++ b/tests/unit/storage/test_local_store.py @@ -36,8 +36,8 @@ def test_write_and_read_bytes(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) test_data = b"test data content" - store.write_bytes("test_file.bin", test_data) - result = store.read_bytes("test_file.bin") + store.write_bytes_sync("test_file.bin", test_data) + result = store.read_bytes_sync("test_file.bin") assert result == test_data @@ -47,8 +47,8 @@ def test_write_and_read_text(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) test_text = "test text content\nwith multiple lines" - store.write_text("test_file.txt", test_text) - result = store.read_text("test_file.txt") + store.write_text_sync("test_file.txt", test_text) + result = store.read_text_sync("test_file.txt") assert result == test_text @@ -58,8 +58,8 @@ def test_write_and_read_text_custom_encoding(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) test_text = "test with ünicode" - store.write_text("test_file.txt", test_text, encoding="latin-1") - result = store.read_text("test_file.txt", encoding="latin-1") + store.write_text_sync("test_file.txt", test_text, encoding="latin-1") + result = store.read_text_sync("test_file.txt", encoding="latin-1") 
assert result == test_text @@ -68,21 +68,21 @@ def test_exists(tmp_path: Path) -> None: """Test exists operation.""" store = LocalStore(str(tmp_path)) - assert not store.exists("nonexistent.txt") + assert not store.exists_sync("nonexistent.txt") - store.write_text("existing.txt", "content") - assert store.exists("existing.txt") + store.write_text_sync("existing.txt", "content") + assert store.exists_sync("existing.txt") def test_delete(tmp_path: Path) -> None: """Test delete operation.""" store = LocalStore(str(tmp_path)) - store.write_text("to_delete.txt", "content") - assert store.exists("to_delete.txt") + store.write_text_sync("to_delete.txt", "content") + assert store.exists_sync("to_delete.txt") - store.delete("to_delete.txt") - assert not store.exists("to_delete.txt") + store.delete_sync("to_delete.txt") + assert not store.exists_sync("to_delete.txt") def test_copy(tmp_path: Path) -> None: @@ -90,11 +90,11 @@ def test_copy(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) original_content = "original content" - store.write_text("original.txt", original_content) - store.copy("original.txt", "copied.txt") + store.write_text_sync("original.txt", original_content) + store.copy_sync("original.txt", "copied.txt") - assert store.exists("copied.txt") - assert store.read_text("copied.txt") == original_content + assert store.exists_sync("copied.txt") + assert store.read_text_sync("copied.txt") == original_content def test_move(tmp_path: Path) -> None: @@ -102,12 +102,12 @@ def test_move(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) original_content = "content to move" - store.write_text("original.txt", original_content) - store.move("original.txt", "moved.txt") + store.write_text_sync("original.txt", original_content) + store.move_sync("original.txt", "moved.txt") - assert not store.exists("original.txt") - assert store.exists("moved.txt") - assert store.read_text("moved.txt") == original_content + assert not store.exists_sync("original.txt") + assert store.exists_sync("moved.txt") + assert store.read_text_sync("moved.txt") == original_content def test_list_objects(tmp_path: Path) -> None: @@ -115,12 +115,12 @@ def test_list_objects(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) # Create test files - store.write_text("file1.txt", "content1") - store.write_text("file2.txt", "content2") - store.write_text("subdir/file3.txt", "content3") + store.write_text_sync("file1.txt", "content1") + store.write_text_sync("file2.txt", "content2") + store.write_text_sync("subdir/file3.txt", "content3") # List all objects - all_objects = store.list_objects() + all_objects = store.list_objects_sync() assert "file1.txt" in all_objects assert "file2.txt" in all_objects assert "subdir/file3.txt" in all_objects @@ -131,12 +131,12 @@ def test_list_objects_with_prefix(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) # Create test files - store.write_text("prefix_file1.txt", "content1") - store.write_text("prefix_file2.txt", "content2") - store.write_text("other_file.txt", "content3") + store.write_text_sync("prefix_file1.txt", "content1") + store.write_text_sync("prefix_file2.txt", "content2") + store.write_text_sync("other_file.txt", "content3") # List with prefix - prefixed_objects = store.list_objects(prefix="prefix_") + prefixed_objects = store.list_objects_sync(prefix="prefix_") assert "prefix_file1.txt" in prefixed_objects assert "prefix_file2.txt" in prefixed_objects assert "other_file.txt" not in prefixed_objects @@ -147,13 +147,13 @@ def test_glob(tmp_path: Path) -> 
None: store = LocalStore(str(tmp_path)) # Create test files - store.write_text("test1.sql", "SELECT 1") - store.write_text("test2.sql", "SELECT 2") - store.write_text("config.json", "{}") - store.write_text("subdir/test3.sql", "SELECT 3") + store.write_text_sync("test1.sql", "SELECT 1") + store.write_text_sync("test2.sql", "SELECT 2") + store.write_text_sync("config.json", "{}") + store.write_text_sync("subdir/test3.sql", "SELECT 3") # Test glob patterns - sql_files = store.glob("*.sql") + sql_files = store.glob_sync("*.sql") assert "test1.sql" in sql_files assert "test2.sql" in sql_files assert "config.json" not in sql_files @@ -164,8 +164,8 @@ def test_get_metadata(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) test_content = "test content for metadata" - store.write_text("test_file.txt", test_content) - metadata = store.get_metadata("test_file.txt") + store.write_text_sync("test_file.txt", test_content) + metadata = store.get_metadata_sync("test_file.txt") assert "size" in metadata assert "modified" in metadata @@ -176,13 +176,13 @@ def test_is_object_and_is_path(tmp_path: Path) -> None: """Test is_object and is_path operations.""" store = LocalStore(str(tmp_path)) - store.write_text("file.txt", "content") + store.write_text_sync("file.txt", "content") (tmp_path / "subdir").mkdir() - assert store.is_object("file.txt") - assert not store.is_object("subdir") - assert not store.is_path("file.txt") - assert store.is_path("subdir") + assert store.is_object_sync("file.txt") + assert not store.is_object_sync("subdir") + assert not store.is_path_sync("file.txt") + assert store.is_path_sync("subdir") @pytest.mark.skipif(not PYARROW_INSTALLED, reason="PyArrow not installed") @@ -194,8 +194,8 @@ def test_write_and_read_arrow(tmp_path: Path) -> None: data: dict[str, Any] = {"id": [1, 2, 3], "name": ["Alice", "Bob", "Charlie"], "score": [95.5, 87.0, 92.3]} table = pa.table(data) - store.write_arrow("test_data.parquet", table) - result = store.read_arrow("test_data.parquet") + store.write_arrow_sync("test_data.parquet", table) + result = store.read_arrow_sync("test_data.parquet") assert result.equals(table) @@ -209,10 +209,10 @@ def test_stream_arrow(tmp_path: Path) -> None: data: dict[str, Any] = {"id": [1, 2, 3, 4, 5], "value": ["a", "b", "c", "d", "e"]} table = pa.table(data) - store.write_arrow("stream_test.parquet", table) + store.write_arrow_sync("stream_test.parquet", table) # Stream record batches - batches = list(store.stream_arrow("stream_test.parquet")) + batches = list(store.stream_arrow_sync("stream_test.parquet")) assert len(batches) > 0 # Verify we can read the data @@ -224,7 +224,7 @@ def test_sign_sync_raises_not_implemented(tmp_path: Path) -> None: """Test sign_sync raises NotImplementedError for local files.""" store = LocalStore(str(tmp_path)) - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") # Local storage does not support URL signing assert store.supports_signing is False @@ -239,7 +239,7 @@ def test_resolve_path_absolute(tmp_path: Path) -> None: # Absolute path should be returned as-is test_path = tmp_path / "test.txt" - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") resolved = store._resolve_path(str(test_path)) assert resolved == test_path @@ -259,12 +259,12 @@ def test_nested_directory_operations(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) # Write to nested path - store.write_text("level1/level2/file.txt", "nested content") - assert 
store.exists("level1/level2/file.txt") - assert store.read_text("level1/level2/file.txt") == "nested content" + store.write_text_sync("level1/level2/file.txt", "nested content") + assert store.exists_sync("level1/level2/file.txt") + assert store.read_text_sync("level1/level2/file.txt") == "nested content" # List should include nested files - objects = store.list_objects() + objects = store.list_objects_sync() assert "level1/level2/file.txt" in objects @@ -273,10 +273,10 @@ def test_file_not_found_errors(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) with pytest.raises(FileNotFoundError): - store.read_bytes("nonexistent.bin") + store.read_bytes_sync("nonexistent.bin") with pytest.raises(FileNotFoundError): - store.read_text("nonexistent.txt") + store.read_text_sync("nonexistent.txt") # Async tests @@ -437,13 +437,13 @@ def test_arrow_operations_without_pyarrow(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) with pytest.raises(MissingDependencyError, match="pyarrow"): - store.read_arrow("test.parquet") + store.read_arrow_sync("test.parquet") with pytest.raises(MissingDependencyError, match="pyarrow"): - store.write_arrow("test.parquet", None) # type: ignore + store.write_arrow_sync("test.parquet", None) # type: ignore with pytest.raises(MissingDependencyError, match="pyarrow"): - list(store.stream_arrow("*.parquet")) + list(store.stream_arrow_sync("*.parquet")) # Tests for base_path combination fix @@ -492,8 +492,8 @@ def test_base_path_combination_full_workflow(tmp_path: Path) -> None: # Write and read should work correctly test_data = b"test content" - store.write_bytes("test.bin", test_data) - result = store.read_bytes("test.bin") + store.write_bytes_sync("test.bin", test_data) + result = store.read_bytes_sync("test.bin") assert result == test_data # Verify file is in the correct location @@ -511,7 +511,7 @@ async def test_stream_read_async_does_not_block_event_loop(tmp_path: Path) -> No # Write a reasonably sized file test_data = b"x" * 100_000 - store.write_bytes("large_file.bin", test_data) + store.write_bytes_sync("large_file.bin", test_data) # Track if concurrent task runs during streaming concurrent_task_ran = False @@ -537,7 +537,7 @@ async def test_stream_read_async_respects_chunk_size(tmp_path: Path) -> None: store = LocalStore(str(tmp_path)) test_data = b"x" * 10_000 - store.write_bytes("chunked_file.bin", test_data) + store.write_bytes_sync("chunked_file.bin", test_data) chunk_size = 1000 chunks = [chunk async for chunk in await store.stream_read_async("chunked_file.bin", chunk_size=chunk_size)] @@ -558,7 +558,7 @@ async def test_stream_read_async_with_base_path(tmp_path: Path) -> None: store = LocalStore(f"file://{tmp_path}", base_path="data") test_data = b"streaming test data" - store.write_bytes("stream_test.bin", test_data) + store.write_bytes_sync("stream_test.bin", test_data) chunks = [chunk async for chunk in await store.stream_read_async("stream_test.bin")] diff --git a/tests/unit/storage/test_obstore_backend.py b/tests/unit/storage/test_obstore_backend.py index 6552de30..30b29c55 100644 --- a/tests/unit/storage/test_obstore_backend.py +++ b/tests/unit/storage/test_obstore_backend.py @@ -41,8 +41,8 @@ def test_write_and_read_bytes(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") test_data = b"test data content" - store.write_bytes("test_file.bin", test_data) - result = store.read_bytes("test_file.bin") + store.write_bytes_sync("test_file.bin", test_data) + result = store.read_bytes_sync("test_file.bin") assert result == 
test_data @@ -55,8 +55,8 @@ def test_write_and_read_text(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") test_text = "test text content\nwith multiple lines" - store.write_text("test_file.txt", test_text) - result = store.read_text("test_file.txt") + store.write_text_sync("test_file.txt", test_text) + result = store.read_text_sync("test_file.txt") assert result == test_text @@ -68,10 +68,10 @@ def test_exists(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") - assert not store.exists("nonexistent.txt") + assert not store.exists_sync("nonexistent.txt") - store.write_text("existing.txt", "content") - assert store.exists("existing.txt") + store.write_text_sync("existing.txt", "content") + assert store.exists_sync("existing.txt") @pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore missing") @@ -81,11 +81,11 @@ def test_delete(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") - store.write_text("to_delete.txt", "content") - assert store.exists("to_delete.txt") + store.write_text_sync("to_delete.txt", "content") + assert store.exists_sync("to_delete.txt") - store.delete("to_delete.txt") - assert not store.exists("to_delete.txt") + store.delete_sync("to_delete.txt") + assert not store.exists_sync("to_delete.txt") @pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore missing") @@ -96,11 +96,11 @@ def test_copy(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") original_content = "original content" - store.write_text("original.txt", original_content) - store.copy("original.txt", "copied.txt") + store.write_text_sync("original.txt", original_content) + store.copy_sync("original.txt", "copied.txt") - assert store.exists("copied.txt") - assert store.read_text("copied.txt") == original_content + assert store.exists_sync("copied.txt") + assert store.read_text_sync("copied.txt") == original_content @pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore missing") @@ -111,12 +111,12 @@ def test_move(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") original_content = "content to move" - store.write_text("original.txt", original_content) - store.move("original.txt", "moved.txt") + store.write_text_sync("original.txt", original_content) + store.move_sync("original.txt", "moved.txt") - assert not store.exists("original.txt") - assert store.exists("moved.txt") - assert store.read_text("moved.txt") == original_content + assert not store.exists_sync("original.txt") + assert store.exists_sync("moved.txt") + assert store.read_text_sync("moved.txt") == original_content @pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore missing") @@ -127,12 +127,12 @@ def test_list_objects(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") # Create test files - store.write_text("file1.txt", "content1") - store.write_text("file2.txt", "content2") - store.write_text("subdir/file3.txt", "content3") + store.write_text_sync("file1.txt", "content1") + store.write_text_sync("file2.txt", "content2") + store.write_text_sync("subdir/file3.txt", "content3") # List all objects - all_objects = store.list_objects() + all_objects = store.list_objects_sync() assert any("file1.txt" in obj for obj in all_objects) assert any("file2.txt" in obj for obj in all_objects) assert any("file3.txt" in obj for obj in all_objects) @@ -146,12 +146,12 @@ def test_glob(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") # Create test files - store.write_text("test1.sql", "SELECT 1") - 
store.write_text("test2.sql", "SELECT 2") - store.write_text("config.json", "{}") + store.write_text_sync("test1.sql", "SELECT 1") + store.write_text_sync("test2.sql", "SELECT 2") + store.write_text_sync("config.json", "{}") # Test glob patterns - sql_files = store.glob("*.sql") + sql_files = store.glob_sync("*.sql") assert any("test1.sql" in obj for obj in sql_files) assert any("test2.sql" in obj for obj in sql_files) assert not any("config.json" in obj for obj in sql_files) @@ -165,8 +165,8 @@ def test_get_metadata(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") test_content = "test content for metadata" - store.write_text("test_file.txt", test_content) - metadata = store.get_metadata("test_file.txt") + store.write_text_sync("test_file.txt", test_content) + metadata = store.get_metadata_sync("test_file.txt") assert "exists" in metadata assert metadata["exists"] is True @@ -179,14 +179,14 @@ def test_is_object_and_is_path(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") - store.write_text("file.txt", "content") + store.write_text_sync("file.txt", "content") # Create directory by writing file inside it - store.write_text("subdir/nested.txt", "content") + store.write_text_sync("subdir/nested.txt", "content") - assert store.is_object("file.txt") - assert not store.is_object("subdir") - assert not store.is_path("file.txt") - assert store.is_path("subdir") + assert store.is_object_sync("file.txt") + assert not store.is_object_sync("subdir") + assert not store.is_path_sync("file.txt") + assert store.is_path_sync("subdir") @pytest.mark.skipif(not OBSTORE_INSTALLED or not PYARROW_INSTALLED, reason="obstore or PyArrow missing") @@ -202,8 +202,8 @@ def test_write_and_read_arrow(tmp_path: Path) -> None: data: dict[str, Any] = {"id": [1, 2, 3], "name": ["Alice", "Bob", "Charlie"], "score": [95.5, 87.0, 92.3]} table = pa.table(data) - store.write_arrow("test_data.parquet", table) - result = store.read_arrow("test_data.parquet") + store.write_arrow_sync("test_data.parquet", table) + result = store.read_arrow_sync("test_data.parquet") assert result.equals(table) @@ -221,10 +221,10 @@ def test_stream_arrow(tmp_path: Path) -> None: data: dict[str, Any] = {"id": [1, 2, 3, 4, 5], "value": ["a", "b", "c", "d", "e"]} table = pa.table(data) - store.write_arrow("stream_test.parquet", table) + store.write_arrow_sync("stream_test.parquet", table) # Stream record batches - batches = list(store.stream_arrow("stream_test.parquet")) + batches = list(store.stream_arrow_sync("stream_test.parquet")) assert len(batches) > 0 # Verify we can read the data @@ -239,7 +239,7 @@ def test_sign_raises_not_implemented_for_local_files(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") # Local file protocol does not support URL signing assert store.supports_signing is False @@ -462,13 +462,13 @@ def test_arrow_operations_without_pyarrow(tmp_path: Path) -> None: store = ObStoreBackend(f"file://{tmp_path}") with pytest.raises(MissingDependencyError, match="pyarrow"): - store.read_arrow("test.parquet") + store.read_arrow_sync("test.parquet") with pytest.raises(MissingDependencyError, match="pyarrow"): - store.write_arrow("test.parquet", None) # type: ignore + store.write_arrow_sync("test.parquet", None) # type: ignore with pytest.raises(MissingDependencyError, match="pyarrow"): - list(store.stream_arrow("*.parquet")) + list(store.stream_arrow_sync("*.parquet")) # Tests for base_path 
fixes (Issue #336) @@ -487,8 +487,8 @@ def test_file_uri_with_relative_base_path(tmp_path: Path) -> None: assert store._local_store_root == expected_root # Write and read back - store.write_bytes("test.txt", b"hello") - assert store.read_bytes("test.txt") == b"hello" + store.write_bytes_sync("test.txt", b"hello") + assert store.read_bytes_sync("test.txt") == b"hello" # Verify file is in correct physical location assert (tmp_path / subdir / "test.txt").exists() @@ -510,7 +510,7 @@ def test_file_uri_with_absolute_base_path_override(tmp_path: Path) -> None: assert store._local_store_root == str(override_path) # Operations should work in override location - store.write_bytes("test.txt", b"override content") + store.write_bytes_sync("test.txt", b"override content") assert (override_path / "test.txt").read_bytes() == b"override content" @@ -527,9 +527,9 @@ def test_file_uri_with_nested_base_path(tmp_path: Path) -> None: assert store._local_store_root == expected_root # Write and verify - store.write_bytes("deep.txt", b"deep content") + store.write_bytes_sync("deep.txt", b"deep content") assert (tmp_path / nested_path / "deep.txt").exists() - assert store.read_bytes("deep.txt") == b"deep content" + assert store.read_bytes_sync("deep.txt") == b"deep content" @pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore missing") diff --git a/tests/unit/storage/test_signing.py b/tests/unit/storage/test_signing.py index 264f9d1a..24de3310 100644 --- a/tests/unit/storage/test_signing.py +++ b/tests/unit/storage/test_signing.py @@ -75,7 +75,7 @@ def test_obstore_file_sign_sync_raises_not_implemented(tmp_path: Path) -> None: from sqlspec.storage.backends.obstore import ObStoreBackend store = ObStoreBackend(f"file://{tmp_path}") - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") with pytest.raises(NotImplementedError) as excinfo: store.sign_sync("test.txt") @@ -90,7 +90,7 @@ def test_obstore_memory_sign_sync_raises_not_implemented() -> None: from sqlspec.storage.backends.obstore import ObStoreBackend store = ObStoreBackend("memory://") - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") with pytest.raises(NotImplementedError) as excinfo: store.sign_sync("test.txt") @@ -133,7 +133,7 @@ def test_fsspec_sign_sync_raises_not_implemented(tmp_path: Path) -> None: from sqlspec.storage.backends.fsspec import FSSpecBackend store = FSSpecBackend("file", base_path=str(tmp_path)) - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") with pytest.raises(NotImplementedError) as excinfo: store.sign_sync("test.txt") @@ -161,7 +161,7 @@ def test_local_store_sign_sync_raises_not_implemented(tmp_path: Path) -> None: from sqlspec.storage.backends.local import LocalStore store = LocalStore(str(tmp_path)) - store.write_text("test.txt", "content") + store.write_text_sync("test.txt", "content") with pytest.raises(NotImplementedError) as excinfo: store.sign_sync("test.txt") @@ -188,8 +188,8 @@ def test_obstore_sign_sync_with_list_paths_raises_not_implemented(tmp_path: Path from sqlspec.storage.backends.obstore import ObStoreBackend store = ObStoreBackend(f"file://{tmp_path}") - store.write_text("test1.txt", "content1") - store.write_text("test2.txt", "content2") + store.write_text_sync("test1.txt", "content1") + store.write_text_sync("test2.txt", "content2") with pytest.raises(NotImplementedError): store.sign_sync(["test1.txt", "test2.txt"]) @@ -201,8 +201,8 @@ def 
test_fsspec_sign_sync_with_list_paths_raises_not_implemented(tmp_path: Path) from sqlspec.storage.backends.fsspec import FSSpecBackend store = FSSpecBackend("file", base_path=str(tmp_path)) - store.write_text("test1.txt", "content1") - store.write_text("test2.txt", "content2") + store.write_text_sync("test1.txt", "content1") + store.write_text_sync("test2.txt", "content2") with pytest.raises(NotImplementedError): store.sign_sync(["test1.txt", "test2.txt"]) @@ -213,8 +213,8 @@ def test_local_store_sign_sync_with_list_paths_raises_not_implemented(tmp_path: from sqlspec.storage.backends.local import LocalStore store = LocalStore(str(tmp_path)) - store.write_text("test1.txt", "content1") - store.write_text("test2.txt", "content2") + store.write_text_sync("test1.txt", "content1") + store.write_text_sync("test2.txt", "content2") with pytest.raises(NotImplementedError): store.sign_sync(["test1.txt", "test2.txt"]) @@ -370,6 +370,7 @@ def test_obstore_error_message_suggests_cloud_backends(tmp_path: Path) -> None: from sqlspec.storage.backends.obstore import ObStoreBackend store = ObStoreBackend(f"file://{tmp_path}") + store.write_text_sync("test.txt", "content") with pytest.raises(NotImplementedError) as excinfo: store.sign_sync("test.txt") @@ -386,6 +387,7 @@ def test_fsspec_error_message_suggests_obstore_alternative(tmp_path: Path) -> No from sqlspec.storage.backends.fsspec import FSSpecBackend store = FSSpecBackend("file", base_path=str(tmp_path)) + store.write_text_sync("test.txt", "content") with pytest.raises(NotImplementedError) as excinfo: store.sign_sync("test.txt") @@ -399,6 +401,7 @@ def test_local_store_error_message_mentions_file_uri(tmp_path: Path) -> None: from sqlspec.storage.backends.local import LocalStore store = LocalStore(str(tmp_path)) + store.write_text_sync("test.txt", "content") with pytest.raises(NotImplementedError) as excinfo: store.sign_sync("test.txt") diff --git a/tests/unit/storage/test_storage_registry.py b/tests/unit/storage/test_storage_registry.py index fe73f0e6..01192610 100644 --- a/tests/unit/storage/test_storage_registry.py +++ b/tests/unit/storage/test_storage_registry.py @@ -151,8 +151,8 @@ def test_register_alias_with_base_path(tmp_path: Path) -> None: registry.register_alias("test_store", f"file://{tmp_path}/data") backend = registry.get("test_store") - backend.write_text("test.txt", "content") - assert backend.exists("test.txt") + backend.write_text_sync("test.txt", "content") + assert backend.exists_sync("test.txt") def test_register_alias_with_backend_override(tmp_path: Path) -> None: diff --git a/tests/unit/utils/test_fixtures.py b/tests/unit/utils/test_fixtures.py index eba2dd5c..b7d32191 100644 --- a/tests/unit/utils/test_fixtures.py +++ b/tests/unit/utils/test_fixtures.py @@ -387,7 +387,7 @@ def test_write_fixture_with_custom_backend(mock_registry: Mock) -> None: # Verify storage backend was called correctly mock_registry.get.assert_called_once_with("s3://bucket", custom_param="value") - mock_storage.write_text.assert_called_once() + mock_storage.write_text_sync.assert_called_once() async def test_write_fixture_async_dict(tmp_path: Path) -> None: diff --git a/uv.lock b/uv.lock index 7df2d019..ec1e02b4 100644 --- a/uv.lock +++ b/uv.lock @@ -343,7 +343,7 @@ wheels = [ [[package]] name = "alembic" -version = "1.18.1" +version = "1.18.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, @@ -351,9 +351,9 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] 
-sdist = { url = "https://files.pythonhosted.org/packages/49/cc/aca263693b2ece99fa99a09b6d092acb89973eb2bb575faef1777e04f8b4/alembic-1.18.1.tar.gz", hash = "sha256:83ac6b81359596816fb3b893099841a0862f2117b2963258e965d70dc62fb866", size = 2044319, upload-time = "2026-01-14T18:53:14.907Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/93/07f5ba5d8e4f4049e864faa9d822bbbbfb6f3223a4ffb1376768ab9ee4b8/alembic-1.18.2.tar.gz", hash = "sha256:1c3ddb635f26efbc80b1b90c5652548202022d4e760f6a78d6d85959280e3684", size = 2048272, upload-time = "2026-01-28T21:23:30.914Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/36/cd9cb6101e81e39076b2fbe303bfa3c85ca34e55142b0324fcbf22c5c6e2/alembic-1.18.1-py3-none-any.whl", hash = "sha256:f1c3b0920b87134e851c25f1f7f236d8a332c34b75416802d06971df5d1b7810", size = 260973, upload-time = "2026-01-14T18:53:17.533Z" }, + { url = "https://files.pythonhosted.org/packages/1a/60/ced4277ccf61f91eb03c4ac9f63b9567eb814f9ab1cd7835f00fbd5d0c14/alembic-1.18.2-py3-none-any.whl", hash = "sha256:18a5f6448af4864cc308aadf33eb37c0116da9a60fd9bb3f31ccb1b522b4a9b9", size = 261953, upload-time = "2026-01-28T21:23:32.508Z" }, ] [[package]] @@ -1201,67 +1201,62 @@ toml = [ [[package]] name = "cryptography" -version = "46.0.3" +version = "46.0.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, 
upload-time = "2025-10-15T23:17:39.236Z" }, - { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = 
"2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, - { url = "https://files.pythonhosted.org/packages/d9/cd/1a8633802d766a0fa46f382a77e096d7e209e0817892929655fe0586ae32/cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32", size = 3689163, upload-time = "2025-10-15T23:18:13.821Z" }, - { url = "https://files.pythonhosted.org/packages/4c/59/6b26512964ace6480c3e54681a9859c974172fb141c38df11eadd8416947/cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c", size = 3429474, upload-time = "2025-10-15T23:18:15.477Z" }, - { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, - { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, - { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, - { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485", size = 7173686, upload-time = "2026-01-28T00:23:07.515Z" }, + { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" }, + { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" }, + { url = "https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" }, + { url = "https://files.pythonhosted.org/packages/0c/d8/4bb7aec442a9049827aa34cee1aa83803e528fa55da9a9d45d01d1bb933e/cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81", size = 4947652, upload-time = "2026-01-28T00:23:14.554Z" }, + { url = "https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255", size = 4455157, upload-time = "2026-01-28T00:23:16.443Z" }, + 
{ url = "https://files.pythonhosted.org/packages/0a/05/19d849cf4096448779d2dcc9bb27d097457dac36f7273ffa875a93b5884c/cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e", size = 3981078, upload-time = "2026-01-28T00:23:17.838Z" }, + { url = "https://files.pythonhosted.org/packages/e6/89/f7bac81d66ba7cde867a743ea5b37537b32b5c633c473002b26a226f703f/cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c", size = 4276213, upload-time = "2026-01-28T00:23:19.257Z" }, + { url = "https://files.pythonhosted.org/packages/da/9f/7133e41f24edd827020ad21b068736e792bc68eecf66d93c924ad4719fb3/cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32", size = 4912190, upload-time = "2026-01-28T00:23:21.244Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" }, + { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a7/0fca0fd3591dffc297278a61813d7f661a14243dd60f499a7a5b48acb52a/cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5", size = 3026378, upload-time = "2026-01-28T00:23:28.317Z" }, + { url = "https://files.pythonhosted.org/packages/2d/12/652c84b6f9873f0909374864a57b003686c642ea48c84d6c7e2c515e6da5/cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b", size = 3478614, upload-time = "2026-01-28T00:23:30.275Z" }, + { url = "https://files.pythonhosted.org/packages/b9/27/542b029f293a5cce59349d799d4d8484b3b1654a7b9a0585c266e974a488/cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908", size = 7116417, upload-time = "2026-01-28T00:23:31.958Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f5/559c25b77f40b6bf828eabaf988efb8b0e17b573545edb503368ca0a2a03/cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da", size = 4264508, upload-time = "2026-01-28T00:23:34.264Z" }, + { url = "https://files.pythonhosted.org/packages/49/a1/551fa162d33074b660dc35c9bc3616fefa21a0e8c1edd27b92559902e408/cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829", size = 4409080, upload-time = "2026-01-28T00:23:35.793Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/6a/4d8d129a755f5d6df1bbee69ea2f35ebfa954fa1847690d1db2e8bca46a5/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2", size = 4270039, upload-time = "2026-01-28T00:23:37.263Z" }, + { url = "https://files.pythonhosted.org/packages/4c/f5/ed3fcddd0a5e39321e595e144615399e47e7c153a1fb8c4862aec3151ff9/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085", size = 4926748, upload-time = "2026-01-28T00:23:38.884Z" }, + { url = "https://files.pythonhosted.org/packages/43/ae/9f03d5f0c0c00e85ecb34f06d3b79599f20630e4db91b8a6e56e8f83d410/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b", size = 4442307, upload-time = "2026-01-28T00:23:40.56Z" }, + { url = "https://files.pythonhosted.org/packages/8b/22/e0f9f2dae8040695103369cf2283ef9ac8abe4d51f68710bec2afd232609/cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd", size = 3959253, upload-time = "2026-01-28T00:23:42.827Z" }, + { url = "https://files.pythonhosted.org/packages/01/5b/6a43fcccc51dae4d101ac7d378a8724d1ba3de628a24e11bf2f4f43cba4d/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2", size = 4269372, upload-time = "2026-01-28T00:23:44.655Z" }, + { url = "https://files.pythonhosted.org/packages/17/b7/0f6b8c1dd0779df2b526e78978ff00462355e31c0a6f6cff8a3e99889c90/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e", size = 4891908, upload-time = "2026-01-28T00:23:46.48Z" }, + { url = "https://files.pythonhosted.org/packages/83/17/259409b8349aa10535358807a472c6a695cf84f106022268d31cea2b6c97/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f", size = 4441254, upload-time = "2026-01-28T00:23:48.403Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fe/e4a1b0c989b00cee5ffa0764401767e2d1cf59f45530963b894129fd5dce/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82", size = 4396520, upload-time = "2026-01-28T00:23:50.26Z" }, + { url = "https://files.pythonhosted.org/packages/b3/81/ba8fd9657d27076eb40d6a2f941b23429a3c3d2f56f5a921d6b936a27bc9/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c", size = 4651479, upload-time = "2026-01-28T00:23:51.674Z" }, + { url = "https://files.pythonhosted.org/packages/00/03/0de4ed43c71c31e4fe954edd50b9d28d658fef56555eba7641696370a8e2/cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061", size = 3001986, upload-time = "2026-01-28T00:23:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/5c/70/81830b59df7682917d7a10f833c4dab2a5574cd664e86d18139f2b421329/cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7", size = 3468288, upload-time = "2026-01-28T00:23:55.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/f7/f648fdbb61d0d45902d3f374217451385edc7e7768d1b03ff1d0e5ffc17b/cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab", size = 7169583, upload-time = "2026-01-28T00:23:56.558Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" }, + { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" }, + { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 4278151, upload-time = "2026-01-28T00:24:01.731Z" }, + { url = "https://files.pythonhosted.org/packages/20/0b/a7fce65ee08c3c02f7a8310cc090a732344066b990ac63a9dfd0a655d321/cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4", size = 4939441, upload-time = "2026-01-28T00:24:03.175Z" }, + { url = "https://files.pythonhosted.org/packages/db/a7/20c5701e2cd3e1dfd7a19d2290c522a5f435dd30957d431dcb531d0f1413/cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af", size = 4451617, upload-time = "2026-01-28T00:24:05.403Z" }, + { url = "https://files.pythonhosted.org/packages/00/dc/3e16030ea9aa47b63af6524c354933b4fb0e352257c792c4deeb0edae367/cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263", size = 3977774, upload-time = "2026-01-28T00:24:06.851Z" }, + { url = "https://files.pythonhosted.org/packages/42/c8/ad93f14118252717b465880368721c963975ac4b941b7ef88f3c56bf2897/cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095", size = 4277008, upload-time = "2026-01-28T00:24:08.926Z" }, + { url = "https://files.pythonhosted.org/packages/00/cf/89c99698151c00a4631fbfcfcf459d308213ac29e321b0ff44ceeeac82f1/cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b", size = 4903339, upload-time = "2026-01-28T00:24:12.009Z" }, + { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" }, + { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" }, + { url = "https://files.pythonhosted.org/packages/0a/21/f7433d18fe6d5845329cbdc597e30caf983229c7a245bcf54afecc555938/cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc", size = 3009779, upload-time = "2026-01-28T00:24:20.198Z" }, + { url = "https://files.pythonhosted.org/packages/3a/6a/bd2e7caa2facffedf172a45c1a02e551e6d7d4828658c9a245516a598d94/cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976", size = 3466633, upload-time = "2026-01-28T00:24:21.851Z" }, + { url = "https://files.pythonhosted.org/packages/59/e0/f9c6c53e1f2a1c2507f00f2faba00f01d2f334b35b0fbfe5286715da2184/cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b", size = 3476316, upload-time = "2026-01-28T00:24:24.144Z" }, + { url = "https://files.pythonhosted.org/packages/27/7a/f8d2d13227a9a1a9fe9c7442b057efecffa41f1e3c51d8622f26b9edbe8f/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da", size = 4216693, upload-time = "2026-01-28T00:24:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/c5/de/3787054e8f7972658370198753835d9d680f6cd4a39df9f877b57f0dd69c/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80", size = 4382765, upload-time = "2026-01-28T00:24:27.577Z" }, + { url = "https://files.pythonhosted.org/packages/8a/5f/60e0afb019973ba6a0b322e86b3d61edf487a4f5597618a430a2a15f2d22/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822", size = 4216066, upload-time = "2026-01-28T00:24:29.056Z" }, + { url = "https://files.pythonhosted.org/packages/81/8e/bf4a0de294f147fee66f879d9bae6f8e8d61515558e3d12785dd90eca0be/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947", size = 4382025, upload-time = "2026-01-28T00:24:30.681Z" }, + { url = "https://files.pythonhosted.org/packages/79/f4/9ceb90cfd6a3847069b0b0b353fd3075dc69b49defc70182d8af0c4ca390/cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3", size = 3406043, upload-time = "2026-01-28T00:24:32.236Z" }, ] [[package]] @@ -1476,7 +1471,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -1928,7 +1923,7 @@ wheels = [ [[package]] name = "google-cloud-aiplatform" -version = 
"1.134.0" +version = "1.135.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docstring-parser" }, @@ -1944,9 +1939,9 @@ dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/24/de4f21d0728d640b57bf7bbcd7460827a4daf9eaca61cb5b91be608c40bc/google_cloud_aiplatform-1.134.0.tar.gz", hash = "sha256:964cea117ca1ffc71742970e1091985adac72dfe76e1a1614a02a8cda50d6992", size = 9931075, upload-time = "2026-01-20T19:19:58.867Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/84/908cf03a1316c668766e538a210c5caaf2161ef638a7428aa47aee2a890e/google_cloud_aiplatform-1.135.0.tar.gz", hash = "sha256:1e42fc4c38147066ad05d93cb9208201514d359fb2a64663333cea2d1ec9ab42", size = 9941458, upload-time = "2026-01-28T00:25:48.179Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/f4/6863f3951eb07afd790fe6f8f1a5984224f7df836546a34ed29ab0cfe9af/google_cloud_aiplatform-1.134.0-py2.py3-none-any.whl", hash = "sha256:f249ae67d622deca486310e0021093764892ac357fb744b9e79548f490017ddc", size = 8189190, upload-time = "2026-01-20T19:19:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/66/d81fb4b81db3ee2f00f8b391f91cdb0e01d6886a2b78105f5d9b6c376104/google_cloud_aiplatform-1.135.0-py2.py3-none-any.whl", hash = "sha256:32b53ee61b3f51b14e21dc98fa9d9021c5db171cf7a407bd71abd3da46f5a6a4", size = 8200215, upload-time = "2026-01-28T00:25:45.202Z" }, ] [package.optional-dependencies] @@ -3826,7 +3821,7 @@ wheels = [ [[package]] name = "nbconvert" -version = "7.16.6" +version = "7.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, @@ -3844,9 +3839,9 @@ dependencies = [ { name = "pygments" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } +sdist = { url = "https://files.pythonhosted.org/packages/38/47/81f886b699450d0569f7bc551df2b1673d18df7ff25cc0c21ca36ed8a5ff/nbconvert-7.17.0.tar.gz", hash = "sha256:1b2696f1b5be12309f6c7d707c24af604b87dfaf6d950794c7b07acab96dda78", size = 862855, upload-time = "2026-01-29T16:37:48.478Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4b/8d5f796a792f8a25f6925a96032f098789f448571eb92011df1ae59e8ea8/nbconvert-7.17.0-py3-none-any.whl", hash = "sha256:4f99a63b337b9a23504347afdab24a11faa7d86b405e5c8f9881cd313336d518", size = 261510, upload-time = "2026-01-29T16:37:46.322Z" }, ] [[package]] @@ -4315,120 +4310,120 @@ wheels = [ [[package]] name = "oracledb" -version = "3.4.1" +version = "3.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/9d/4e86cd410294ebbb1f90a609aaae61c5fa064a5c10e501de3f4c67664e6c/oracledb-3.4.1.tar.gz", hash = "sha256:f5920df5ac9446579e8409607bba31dc2d23a2286a5b0ea17cb0d78d419392a6", size = 852693, upload-time = "2025-11-12T03:21:36.157Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/4b/70/05645e72a67b45396a248a7949d89c91dc7a1ab5f7cedad110d9804e29d5/oracledb-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dfe18061f064d0455fad10d9301f6f92df9e32d18d75fb32802caf1ced4b304c", size = 4243226, upload-time = "2025-11-12T03:21:41.734Z" }, - { url = "https://files.pythonhosted.org/packages/7e/cc/f3a78ae31f87e41378c7bc60928fa5432d4eba80806cb0086edc11803a22/oracledb-3.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:84055d6fd093a4d7b8ed653f433531e4c4cc161f7261d78efd7f6a65a1f19444", size = 2426914, upload-time = "2025-11-12T03:21:43.641Z" }, - { url = "https://files.pythonhosted.org/packages/a6/a6/3d3dabbec2651851f13fdb7c318a3c50780090235d340d851f7cb8deeeec/oracledb-3.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9e20b6cd3245e84c30188874c524bb3c67c79b7a04fcb864e6ac39f55eae826", size = 2605903, upload-time = "2025-11-12T03:21:45.378Z" }, - { url = "https://files.pythonhosted.org/packages/ae/59/aa174fc8f5629b890424702edf582a8a635acaa0db1315b16160d703a887/oracledb-3.4.1-cp310-cp310-win32.whl", hash = "sha256:abedb0bf464bcf14d83e245eae000e03cad8ac68c945eb09cc46002d800fbf00", size = 1490352, upload-time = "2025-11-12T03:21:46.732Z" }, - { url = "https://files.pythonhosted.org/packages/8a/1c/9dded6efc747d8980667584c8464295d80d205f8a131e31cacfb274b6ed5/oracledb-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ee604bb0f3acb5680782818f973445b8cd168e72a73b5ca2cd9807140afadee", size = 1837541, upload-time = "2025-11-12T03:21:48.571Z" }, - { url = "https://files.pythonhosted.org/packages/ed/9e/5901349b8797fabc7c6f78230376bfbd5541a847f1eb34be23bfb971add7/oracledb-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:20b268be64994d0f636df9ff7613dcce420133f373d0d7fc84a31dd2f07322c0", size = 4226376, upload-time = "2025-11-12T03:21:49.959Z" }, - { url = "https://files.pythonhosted.org/packages/fc/c0/951d2ab8c04df9da309a82e211d19223a64dbbcfdd79f5f1aba6d8736408/oracledb-3.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d493946318d99a0f0e3f01d7c64c08ddae66f0aac735fa23c1eb94949d9db0f5", size = 2422323, upload-time = "2025-11-12T03:21:51.583Z" }, - { url = "https://files.pythonhosted.org/packages/a8/7c/82843dd7e55dec6331c0c7737e32523eb2f6156c6469055e2cb752e848f4/oracledb-3.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d64fda2fa5d3e82c58b2c5126ab5511bccb84f8b47eedfe9f17e9c100fe7683", size = 2601267, upload-time = "2025-11-12T03:21:52.978Z" }, - { url = "https://files.pythonhosted.org/packages/27/3f/67b50042f955574fca574a2234ba4af421e9268601bceb49efd9c43c6bc8/oracledb-3.4.1-cp311-cp311-win32.whl", hash = "sha256:cd80aa4c4dec7347c6d2909fbaf7e35a5253341ff2cb6f3782ab7ca712bf0405", size = 1488075, upload-time = "2025-11-12T03:21:54.704Z" }, - { url = "https://files.pythonhosted.org/packages/8d/14/bab071234d61e84c65712902dd0edec825d82b3198ffddc977c9ea9a91f3/oracledb-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e01e8696009cec4ebcb9fe678b23b8223595dc186c065899660cac4c1fc189b", size = 1843449, upload-time = "2025-11-12T03:21:56.342Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d9/98367ba2c358de366de70b505531f9717cdfa7e29eff0c9ad113eecfce96/oracledb-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c3f92c023ef1983e0e7f9a1b4a31df8568974c28c06ab0a574b1126e45083a8", size = 4222133, upload-time = 
"2025-11-12T03:21:58.212Z" }, - { url = "https://files.pythonhosted.org/packages/36/52/48ad2f7dae6288a2ddf0ac536d46ce4883d2d10ec7e16afbbd48f1ec0ff3/oracledb-3.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:251211d64b90cc42d00ec2d2893873bc02ff4bc22125e9fc5a7f148a6208fd88", size = 2230374, upload-time = "2025-11-12T03:21:59.656Z" }, - { url = "https://files.pythonhosted.org/packages/8d/08/60d4301b4f72f099ed2252f8d0eb143e6fe9e5c8f4c2705c3163cea36808/oracledb-3.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ea529a5e6036fae3e2bc195fa76b6f48cd9c431e68c74ef78ee6a5e39c855c39", size = 2421755, upload-time = "2025-11-12T03:22:01.543Z" }, - { url = "https://files.pythonhosted.org/packages/48/35/412a90019a030f5dff0c031319733c6b8dd477832bafa88b733b4b3ec57b/oracledb-3.4.1-cp312-cp312-win32.whl", hash = "sha256:94e8e6d63b45fedd4e243147cb25dea1a0f6599d83852f3979fe725a8533e85a", size = 1449688, upload-time = "2025-11-12T03:22:03.422Z" }, - { url = "https://files.pythonhosted.org/packages/7b/01/ae9eca3055dc625923564ca653ca99ddd8eda95e44953ce55c18aba55066/oracledb-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:84f15c483f9ec80dcded925df6ff473c69a293cd694d09b69abb911500659df4", size = 1794622, upload-time = "2025-11-12T03:22:04.941Z" }, - { url = "https://files.pythonhosted.org/packages/f0/4d/e32db901340dc6fc824d0d3b5e4660fe0199fba8adb0e81ac08b639c8ab9/oracledb-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ad817807b293e371c951af8ee67a56a5af88a5680a54fe79dfc7b9393ca128aa", size = 4206469, upload-time = "2025-11-12T03:22:06.881Z" }, - { url = "https://files.pythonhosted.org/packages/cf/68/1a038f29523eea19e42f4dd765bf523752408816b5ff21e8b998d8b25457/oracledb-3.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34b9bc25eae217defa3f4b8289b4915cd1101aaeeec33c7bace74f927996d452", size = 2233055, upload-time = "2025-11-12T03:22:08.259Z" }, - { url = "https://files.pythonhosted.org/packages/b9/66/a51243553ac6b0e1bc2cfd4db8a2f3299b1b60c9231d7c9133ee1442d15b/oracledb-3.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be6575759ba56ab3758f82bfbb74f75288ce69190e19c087793050cb012c0aa1", size = 2443312, upload-time = "2025-11-12T03:22:09.615Z" }, - { url = "https://files.pythonhosted.org/packages/f7/57/a6056d4432c07a959fd1032dd45bfaff69b91ac7e1204dbccf7bf7b4a91d/oracledb-3.4.1-cp313-cp313-win32.whl", hash = "sha256:635587e5f28be83ec0bf72e4bfb2f3a4544c0f8e303f2327f376d57116894541", size = 1453553, upload-time = "2025-11-12T03:22:11.045Z" }, - { url = "https://files.pythonhosted.org/packages/6a/57/dca415d8dd18a2a030a9402d49039493cdce6acfd37c8a038a4ede2328e6/oracledb-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:354177708352e124c0f97ceccbe34be05e7f3ce7040a7dd3c2ebd857145ffe74", size = 1794005, upload-time = "2025-11-12T03:22:12.694Z" }, - { url = "https://files.pythonhosted.org/packages/59/07/dff7b9e6242b627d56f3fa6ad6639802003e1e5fbcc883d0ce27d82455ad/oracledb-3.4.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:3ec1f9dd7310da7cbf219c2a05bb52df08da950c95ad2ace8a289854947bdc6b", size = 4247946, upload-time = "2025-11-12T03:22:14.473Z" }, - { url = "https://files.pythonhosted.org/packages/1f/95/739868c6f312683cc3afe9534644b4ce2d054fe137d8f7a1e7786df9f5aa/oracledb-3.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:337a67d6c91015dfe7a2a1915f65c74adad26fcd428daaead296d91c92f09ad1", size = 2271628, upload-time = "2025-11-12T03:22:15.956Z" }, - { url = "https://files.pythonhosted.org/packages/fb/7c/307da513f5fb68e6454beb5bc1c715ec09a70d2af70a28b9fa6001c1b09b/oracledb-3.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d5ffe4dd26e8012de433ec69f93be5737d81b04324072ec36dad37eb778fd9d", size = 2455603, upload-time = "2025-11-12T03:22:18.112Z" }, - { url = "https://files.pythonhosted.org/packages/c5/1a/af5bd7239cebfc33541432cfcba75893a3f2f44fa66648e6d8ce1fe96b0c/oracledb-3.4.1-cp314-cp314-win32.whl", hash = "sha256:693ef5f8c420545511096b3bc9a3861617222717321bc78c776afbbb6c16c5b9", size = 1474932, upload-time = "2025-11-12T03:22:19.574Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ee/79d2ed18fd234bcbd407c1b36372dc898cf68de825ec650df7b1627acb51/oracledb-3.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:6adb483d7120cdd056173b71c901f71dbe2265c5bd402f768b0b1ab27af519b1", size = 1837566, upload-time = "2025-11-12T03:22:20.959Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/f7/02/70a872d1a4a739b4f7371ab8d3d5ed8c6e57e142e2503531aafcb220893c/oracledb-3.4.2.tar.gz", hash = "sha256:46e0f2278ff1fe83fbc33a3b93c72d429323ec7eed47bc9484e217776cd437e5", size = 855467, upload-time = "2026-01-28T17:25:39.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/5d/b8a0ca1c520fa43ae33260f6f8ca9bd468ade43da7986029bc214965df12/oracledb-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff3c89cecea62af8ca02aa33cab0f2edc0214c747eac7d3364ed6b2640cb55e4", size = 4243966, upload-time = "2026-01-28T17:25:45.05Z" }, + { url = "https://files.pythonhosted.org/packages/f6/43/26e2bbb2a6ee31392a339089e53cb2e386ca795ff4fbe2f673c167821bd6/oracledb-3.4.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e068ef844a327877bfefbef1bc6fb7284c727bb87af80095f08d95bcaf7b8bb2", size = 2426056, upload-time = "2026-01-28T17:25:47.176Z" }, + { url = "https://files.pythonhosted.org/packages/09/ba/11ee1d044295465a04ff45c6e3023d35400bb3f67bc5fed9408f0f2dc04c/oracledb-3.4.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9f434a739405557bd57cb39b62238142bb27855a524a70dc6d397a2a8c576c9d", size = 2603062, upload-time = "2026-01-28T17:25:49.817Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bc/292f2f5f7b65a667787871e300889ab8f4a3b9cfd88c5d78f828a40f6d31/oracledb-3.4.2-cp310-cp310-win32.whl", hash = "sha256:00c79448017f367bb7ab6900efe0706658a53768abea2b4519a4c9b2d5743890", size = 1496639, upload-time = "2026-01-28T17:25:51.298Z" }, + { url = "https://files.pythonhosted.org/packages/21/23/81931c16663e771937c0161bb90460668d2a5f7982b5030ab7bef3b3a4f9/oracledb-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:574c8280d49cbbe21dbe03fc28356d9b9a5b9e300ebcde6c6d106e51453a7e65", size = 1837314, upload-time = "2026-01-28T17:25:52.718Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/be263b668ba32b258d07c85f7bfb6967a9677e016c299207b28734f04c4b/oracledb-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b8e4b8a852251cef09038b75f30fce1227010835f4e19cfbd436027acba2697c", size = 4228552, upload-time = "2026-01-28T17:25:54.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/bc/e832a649529da7c60409a81be41f3213b4c7ffda4fe424222b2145e8d43c/oracledb-3.4.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1617a1db020346883455af005efbefd51be2c4d797e43b1b38455a19f8526b48", size = 2421924, upload-time = "2026-01-28T17:25:56.984Z" }, + { url = "https://files.pythonhosted.org/packages/86/21/d867c37e493a63b5521bd248110ad5b97b18253d64a30703e3e8f3d9631e/oracledb-3.4.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed78d7e7079a778062744ccf42141ce4806818c3f4dd6463e4a7edd561c9f86", size = 2599301, upload-time = "2026-01-28T17:25:58.529Z" }, + { url = "https://files.pythonhosted.org/packages/2a/de/9b1843ea27f7791449652d7f340f042c3053336d2c11caf29e59bab86189/oracledb-3.4.2-cp311-cp311-win32.whl", hash = "sha256:0e16fe3d057e0c41a23ad2ae95bfa002401690773376d476be608f79ac74bf05", size = 1492890, upload-time = "2026-01-28T17:26:00.662Z" }, + { url = "https://files.pythonhosted.org/packages/d6/10/cbc8afa2db0cec80530858d3e4574f9734fae8c0b7f1df261398aa026c5f/oracledb-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:f93cae08e8ed20f2d5b777a8602a71f9418389c661d2c937e84d94863e7e7011", size = 1843355, upload-time = "2026-01-28T17:26:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/8f/81/2e6154f34b71cd93b4946c73ea13b69d54b8d45a5f6bbffe271793240d21/oracledb-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a7396664e592881225ba66385ee83ce339d864f39003d6e4ca31a894a7e7c552", size = 4220806, upload-time = "2026-01-28T17:26:04.322Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a9/a1d59aaac77d8f727156ec6a3b03399917c90b7da4f02d057f92e5601f56/oracledb-3.4.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f04a2d62073407672f114d02529921de0677c6883ed7c64d8d1a3c04caa3238", size = 2233795, upload-time = "2026-01-28T17:26:05.877Z" }, + { url = "https://files.pythonhosted.org/packages/94/ec/8c4a38020cd251572bd406ddcbde98ca052ec94b5684f9aa9ef1ddfcc68c/oracledb-3.4.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8d75e4f879b908be66cce05ba6c05791a5dbb4a15e39abc01aa25c8a2492bd9", size = 2424756, upload-time = "2026-01-28T17:26:07.35Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7d/c251c2a8567151ccfcfbe3467ea9a60fb5480dc4719342e2e6b7a9679e5d/oracledb-3.4.2-cp312-cp312-win32.whl", hash = "sha256:31b7ee83c23d0439778303de8a675717f805f7e8edb5556d48c4d8343bcf14f5", size = 1453486, upload-time = "2026-01-28T17:26:08.869Z" }, + { url = "https://files.pythonhosted.org/packages/4c/78/c939f3c16fb39400c4734d5a3340db5659ba4e9dce23032d7b33ccfd3fe5/oracledb-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:ac25a0448fc830fb7029ad50cd136cdbfcd06975d53967e269772cc5cb8c203a", size = 1794445, upload-time = "2026-01-28T17:26:10.66Z" }, + { url = "https://files.pythonhosted.org/packages/22/68/f7126f5d911c295b57720c6b1a0609a5a2667b4546946433552a4de46333/oracledb-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:643c25d301a289a371e37fcedb59e5fa5e54fb321708e5c12821c4b55bdd8a4d", size = 4205176, upload-time = "2026-01-28T17:26:12.463Z" }, + { url = "https://files.pythonhosted.org/packages/5d/93/2fced60f92dc82e66980a8a3ba5c1ea48110bf1dd81d030edb69d88f992e/oracledb-3.4.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:55397e7eb43bb7017c03a981c736c25724182f5210951181dfe3fab0e5d457fb", size = 2231298, upload-time = "2026-01-28T17:26:14.497Z" }, + { url = "https://files.pythonhosted.org/packages/75/a7/4dd286f3a6348d786fef9e6ab2e6c9b74ca9195d9a756f2a67e45743cdf0/oracledb-3.4.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26a10f9c790bd141ffc8af68520803ed4a44a9258bf7d1eea9bfdd36bd6df7f", size = 2439430, upload-time = "2026-01-28T17:26:16.044Z" }, + { url = "https://files.pythonhosted.org/packages/19/28/94bc753e5e969c60ee5d9c914e2b4ef79999eaca8e91bcab2fbf0586b80b/oracledb-3.4.2-cp313-cp313-win32.whl", hash = "sha256:b974caec2c330c22bbe765705a5ac7d98ec3022811dec2042d561a3c65cb991b", size = 1458209, upload-time = "2026-01-28T17:26:17.652Z" }, + { url = "https://files.pythonhosted.org/packages/cb/2b/593a9b2d4c12c9de3289e67d84fe023336d99f36ba51442a5a0f5ce6acf7/oracledb-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:3df8eee1410d25360599968b1625b000f10c5ae0e47274031a7842a9dc418890", size = 1793558, upload-time = "2026-01-28T17:26:19.914Z" }, + { url = "https://files.pythonhosted.org/packages/42/20/1e98f84c1555911c46b4fa870fbef2a80617bf7e0a5f178078ecf466c917/oracledb-3.4.2-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:59ad6438f56a25e8e1a4a3dd1b42235a5d09ab9ba417ff2ad14eae6596f3d06f", size = 4247459, upload-time = "2026-01-28T17:26:22.356Z" }, + { url = "https://files.pythonhosted.org/packages/7d/74/95963e2d94f84b9937a562a9a2529f72d050afbc2ffd88f6661e3a876f7d/oracledb-3.4.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:404ec1451d0448653ee074213b87d6c5bd65eaa74b50083ddf2c9c3e11c71c71", size = 2271749, upload-time = "2026-01-28T17:26:24.078Z" }, + { url = "https://files.pythonhosted.org/packages/82/89/38ce85148a246087795379ee52c5b20726a00a69c87ba6ec266bcdad30fc/oracledb-3.4.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:19fa80ef84f85ad74077aa626067bbe697e527bd39604b4209f9d86cb2876b89", size = 2452031, upload-time = "2026-01-28T17:26:26.08Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/51fe907fdec0267ad7c6e9a62998cbe878efcd168ea6e39f162fab62fdaa/oracledb-3.4.2-cp314-cp314-win32.whl", hash = "sha256:d7ce75c498bff758548ec6e4424ab4271aa257e5887cc436a54bc947fd46199a", size = 1480973, upload-time = "2026-01-28T17:26:27.584Z" }, + { url = "https://files.pythonhosted.org/packages/48/22/a37354f19786774e5e4041338043b516db060aacfdfcd5aca8bb92c2539a/oracledb-3.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:5d7befb014174c5ae11c3a08f5ed6668a25ab2335d8e7104dca70d54d54a5b3a", size = 1837756, upload-time = "2026-01-28T17:26:29.032Z" }, ] [[package]] name = "orjson" -version = "3.11.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = "2025-12-06T15:55:39.458Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/79/19/b22cf9dad4db20c8737041046054cbd4f38bb5a2d0e4bb60487832ce3d76/orjson-3.11.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:df9eadb2a6386d5ea2bfd81309c505e125cfc9ba2b1b99a97e60985b0b3665d1", size = 245719, upload-time = "2025-12-06T15:53:43.877Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/2e/b136dd6bf30ef5143fbe76a4c142828b55ccc618be490201e9073ad954a1/orjson-3.11.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc70da619744467d8f1f49a8cadae5ec7bbe054e5232d95f92ed8737f8c5870", size = 132467, upload-time = "2025-12-06T15:53:45.379Z" }, - { url = "https://files.pythonhosted.org/packages/ae/fc/ae99bfc1e1887d20a0268f0e2686eb5b13d0ea7bbe01de2b566febcd2130/orjson-3.11.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:073aab025294c2f6fc0807201c76fdaed86f8fc4be52c440fb78fbb759a1ac09", size = 130702, upload-time = "2025-12-06T15:53:46.659Z" }, - { url = "https://files.pythonhosted.org/packages/6e/43/ef7912144097765997170aca59249725c3ab8ef6079f93f9d708dd058df5/orjson-3.11.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:835f26fa24ba0bb8c53ae2a9328d1706135b74ec653ed933869b74b6909e63fd", size = 135907, upload-time = "2025-12-06T15:53:48.487Z" }, - { url = "https://files.pythonhosted.org/packages/3f/da/24d50e2d7f4092ddd4d784e37a3fa41f22ce8ed97abc9edd222901a96e74/orjson-3.11.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667c132f1f3651c14522a119e4dd631fad98761fa960c55e8e7430bb2a1ba4ac", size = 139935, upload-time = "2025-12-06T15:53:49.88Z" }, - { url = "https://files.pythonhosted.org/packages/02/4a/b4cb6fcbfff5b95a3a019a8648255a0fac9b221fbf6b6e72be8df2361feb/orjson-3.11.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42e8961196af655bb5e63ce6c60d25e8798cd4dfbc04f4203457fa3869322c2e", size = 137541, upload-time = "2025-12-06T15:53:51.226Z" }, - { url = "https://files.pythonhosted.org/packages/a5/99/a11bd129f18c2377c27b2846a9d9be04acec981f770d711ba0aaea563984/orjson-3.11.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75412ca06e20904c19170f8a24486c4e6c7887dea591ba18a1ab572f1300ee9f", size = 139031, upload-time = "2025-12-06T15:53:52.309Z" }, - { url = "https://files.pythonhosted.org/packages/64/29/d7b77d7911574733a036bb3e8ad7053ceb2b7d6ea42208b9dbc55b23b9ed/orjson-3.11.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6af8680328c69e15324b5af3ae38abbfcf9cbec37b5346ebfd52339c3d7e8a18", size = 141622, upload-time = "2025-12-06T15:53:53.606Z" }, - { url = "https://files.pythonhosted.org/packages/93/41/332db96c1de76b2feda4f453e91c27202cd092835936ce2b70828212f726/orjson-3.11.5-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a86fe4ff4ea523eac8f4b57fdac319faf037d3c1be12405e6a7e86b3fbc4756a", size = 413800, upload-time = "2025-12-06T15:53:54.866Z" }, - { url = "https://files.pythonhosted.org/packages/76/e1/5a0d148dd1f89ad2f9651df67835b209ab7fcb1118658cf353425d7563e9/orjson-3.11.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e607b49b1a106ee2086633167033afbd63f76f2999e9236f638b06b112b24ea7", size = 151198, upload-time = "2025-12-06T15:53:56.383Z" }, - { url = "https://files.pythonhosted.org/packages/0d/96/8db67430d317a01ae5cf7971914f6775affdcfe99f5bff9ef3da32492ecc/orjson-3.11.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7339f41c244d0eea251637727f016b3d20050636695bc78345cce9029b189401", size = 141984, upload-time = "2025-12-06T15:53:57.746Z" }, - { url = "https://files.pythonhosted.org/packages/71/49/40d21e1aa1ac569e521069228bb29c9b5a350344ccf922a0227d93c2ed44/orjson-3.11.5-cp310-cp310-win32.whl", hash = "sha256:8be318da8413cdbbce77b8c5fac8d13f6eb0f0db41b30bb598631412619572e8", size = 135272, upload-time = "2025-12-06T15:53:59.769Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/7e/d0e31e78be0c100e08be64f48d2850b23bcb4d4c70d114f4e43b39f6895a/orjson-3.11.5-cp310-cp310-win_amd64.whl", hash = "sha256:b9f86d69ae822cabc2a0f6c099b43e8733dda788405cba2665595b7e8dd8d167", size = 133360, upload-time = "2025-12-06T15:54:01.25Z" }, - { url = "https://files.pythonhosted.org/packages/fd/68/6b3659daec3a81aed5ab47700adb1a577c76a5452d35b91c88efee89987f/orjson-3.11.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9c8494625ad60a923af6b2b0bd74107146efe9b55099e20d7740d995f338fcd8", size = 245318, upload-time = "2025-12-06T15:54:02.355Z" }, - { url = "https://files.pythonhosted.org/packages/e9/00/92db122261425f61803ccf0830699ea5567439d966cbc35856fe711bfe6b/orjson-3.11.5-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:7bb2ce0b82bc9fd1168a513ddae7a857994b780b2945a8c51db4ab1c4b751ebc", size = 129491, upload-time = "2025-12-06T15:54:03.877Z" }, - { url = "https://files.pythonhosted.org/packages/94/4f/ffdcb18356518809d944e1e1f77589845c278a1ebbb5a8297dfefcc4b4cb/orjson-3.11.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67394d3becd50b954c4ecd24ac90b5051ee7c903d167459f93e77fc6f5b4c968", size = 132167, upload-time = "2025-12-06T15:54:04.944Z" }, - { url = "https://files.pythonhosted.org/packages/97/c6/0a8caff96f4503f4f7dd44e40e90f4d14acf80d3b7a97cb88747bb712d3e/orjson-3.11.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:298d2451f375e5f17b897794bcc3e7b821c0f32b4788b9bcae47ada24d7f3cf7", size = 130516, upload-time = "2025-12-06T15:54:06.274Z" }, - { url = "https://files.pythonhosted.org/packages/4d/63/43d4dc9bd9954bff7052f700fdb501067f6fb134a003ddcea2a0bb3854ed/orjson-3.11.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa5e4244063db8e1d87e0f54c3f7522f14b2dc937e65d5241ef0076a096409fd", size = 135695, upload-time = "2025-12-06T15:54:07.702Z" }, - { url = "https://files.pythonhosted.org/packages/87/6f/27e2e76d110919cb7fcb72b26166ee676480a701bcf8fc53ac5d0edce32f/orjson-3.11.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1db2088b490761976c1b2e956d5d4e6409f3732e9d79cfa69f876c5248d1baf9", size = 139664, upload-time = "2025-12-06T15:54:08.828Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f8/5966153a5f1be49b5fbb8ca619a529fde7bc71aa0a376f2bb83fed248bcd/orjson-3.11.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2ed66358f32c24e10ceea518e16eb3549e34f33a9d51f99ce23b0251776a1ef", size = 137289, upload-time = "2025-12-06T15:54:09.898Z" }, - { url = "https://files.pythonhosted.org/packages/a7/34/8acb12ff0299385c8bbcbb19fbe40030f23f15a6de57a9c587ebf71483fb/orjson-3.11.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2021afda46c1ed64d74b555065dbd4c2558d510d8cec5ea6a53001b3e5e82a9", size = 138784, upload-time = "2025-12-06T15:54:11.022Z" }, - { url = "https://files.pythonhosted.org/packages/ee/27/910421ea6e34a527f73d8f4ee7bdffa48357ff79c7b8d6eb6f7b82dd1176/orjson-3.11.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b42ffbed9128e547a1647a3e50bc88ab28ae9daa61713962e0d3dd35e820c125", size = 141322, upload-time = "2025-12-06T15:54:12.427Z" }, - { url = "https://files.pythonhosted.org/packages/87/a3/4b703edd1a05555d4bb1753d6ce44e1a05b7a6d7c164d5b332c795c63d70/orjson-3.11.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8d5f16195bb671a5dd3d1dbea758918bada8f6cc27de72bd64adfbd748770814", size = 413612, upload-time = 
"2025-12-06T15:54:13.858Z" }, - { url = "https://files.pythonhosted.org/packages/1b/36/034177f11d7eeea16d3d2c42a1883b0373978e08bc9dad387f5074c786d8/orjson-3.11.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c0e5d9f7a0227df2927d343a6e3859bebf9208b427c79bd31949abcc2fa32fa5", size = 150993, upload-time = "2025-12-06T15:54:15.189Z" }, - { url = "https://files.pythonhosted.org/packages/44/2f/ea8b24ee046a50a7d141c0227c4496b1180b215e728e3b640684f0ea448d/orjson-3.11.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23d04c4543e78f724c4dfe656b3791b5f98e4c9253e13b2636f1af5d90e4a880", size = 141774, upload-time = "2025-12-06T15:54:16.451Z" }, - { url = "https://files.pythonhosted.org/packages/8a/12/cc440554bf8200eb23348a5744a575a342497b65261cd65ef3b28332510a/orjson-3.11.5-cp311-cp311-win32.whl", hash = "sha256:c404603df4865f8e0afe981aa3c4b62b406e6d06049564d58934860b62b7f91d", size = 135109, upload-time = "2025-12-06T15:54:17.73Z" }, - { url = "https://files.pythonhosted.org/packages/a3/83/e0c5aa06ba73a6760134b169f11fb970caa1525fa4461f94d76e692299d9/orjson-3.11.5-cp311-cp311-win_amd64.whl", hash = "sha256:9645ef655735a74da4990c24ffbd6894828fbfa117bc97c1edd98c282ecb52e1", size = 133193, upload-time = "2025-12-06T15:54:19.426Z" }, - { url = "https://files.pythonhosted.org/packages/cb/35/5b77eaebc60d735e832c5b1a20b155667645d123f09d471db0a78280fb49/orjson-3.11.5-cp311-cp311-win_arm64.whl", hash = "sha256:1cbf2735722623fcdee8e712cbaaab9e372bbcb0c7924ad711b261c2eccf4a5c", size = 126830, upload-time = "2025-12-06T15:54:20.836Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a4/8052a029029b096a78955eadd68ab594ce2197e24ec50e6b6d2ab3f4e33b/orjson-3.11.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:334e5b4bff9ad101237c2d799d9fd45737752929753bf4faf4b207335a416b7d", size = 245347, upload-time = "2025-12-06T15:54:22.061Z" }, - { url = "https://files.pythonhosted.org/packages/64/67/574a7732bd9d9d79ac620c8790b4cfe0717a3d5a6eb2b539e6e8995e24a0/orjson-3.11.5-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:ff770589960a86eae279f5d8aa536196ebda8273a2a07db2a54e82b93bc86626", size = 129435, upload-time = "2025-12-06T15:54:23.615Z" }, - { url = "https://files.pythonhosted.org/packages/52/8d/544e77d7a29d90cf4d9eecd0ae801c688e7f3d1adfa2ebae5e1e94d38ab9/orjson-3.11.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed24250e55efbcb0b35bed7caaec8cedf858ab2f9f2201f17b8938c618c8ca6f", size = 132074, upload-time = "2025-12-06T15:54:24.694Z" }, - { url = "https://files.pythonhosted.org/packages/6e/57/b9f5b5b6fbff9c26f77e785baf56ae8460ef74acdb3eae4931c25b8f5ba9/orjson-3.11.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a66d7769e98a08a12a139049aac2f0ca3adae989817f8c43337455fbc7669b85", size = 130520, upload-time = "2025-12-06T15:54:26.185Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6d/d34970bf9eb33f9ec7c979a262cad86076814859e54eb9a059a52f6dc13d/orjson-3.11.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86cfc555bfd5794d24c6a1903e558b50644e5e68e6471d66502ce5cb5fdef3f9", size = 136209, upload-time = "2025-12-06T15:54:27.264Z" }, - { url = "https://files.pythonhosted.org/packages/e7/39/bc373b63cc0e117a105ea12e57280f83ae52fdee426890d57412432d63b3/orjson-3.11.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a230065027bc2a025e944f9d4714976a81e7ecfa940923283bca7bbc1f10f626", size = 139837, upload-time = "2025-12-06T15:54:28.75Z" }, - { url 
= "https://files.pythonhosted.org/packages/cb/aa/7c4818c8d7d324da220f4f1af55c343956003aa4d1ce1857bdc1d396ba69/orjson-3.11.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b29d36b60e606df01959c4b982729c8845c69d1963f88686608be9ced96dbfaa", size = 137307, upload-time = "2025-12-06T15:54:29.856Z" }, - { url = "https://files.pythonhosted.org/packages/46/bf/0993b5a056759ba65145effe3a79dd5a939d4a070eaa5da2ee3180fbb13f/orjson-3.11.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74099c6b230d4261fdc3169d50efc09abf38ace1a42ea2f9994b1d79153d477", size = 139020, upload-time = "2025-12-06T15:54:31.024Z" }, - { url = "https://files.pythonhosted.org/packages/65/e8/83a6c95db3039e504eda60fc388f9faedbb4f6472f5aba7084e06552d9aa/orjson-3.11.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e697d06ad57dd0c7a737771d470eedc18e68dfdefcdd3b7de7f33dfda5b6212e", size = 141099, upload-time = "2025-12-06T15:54:32.196Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b4/24fdc024abfce31c2f6812973b0a693688037ece5dc64b7a60c1ce69e2f2/orjson-3.11.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e08ca8a6c851e95aaecc32bc44a5aa75d0ad26af8cdac7c77e4ed93acf3d5b69", size = 413540, upload-time = "2025-12-06T15:54:33.361Z" }, - { url = "https://files.pythonhosted.org/packages/d9/37/01c0ec95d55ed0c11e4cae3e10427e479bba40c77312b63e1f9665e0737d/orjson-3.11.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e8b5f96c05fce7d0218df3fdfeb962d6b8cfff7e3e20264306b46dd8b217c0f3", size = 151530, upload-time = "2025-12-06T15:54:34.6Z" }, - { url = "https://files.pythonhosted.org/packages/f9/d4/f9ebc57182705bb4bbe63f5bbe14af43722a2533135e1d2fb7affa0c355d/orjson-3.11.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddbfdb5099b3e6ba6d6ea818f61997bb66de14b411357d24c4612cf1ebad08ca", size = 141863, upload-time = "2025-12-06T15:54:35.801Z" }, - { url = "https://files.pythonhosted.org/packages/0d/04/02102b8d19fdcb009d72d622bb5781e8f3fae1646bf3e18c53d1bc8115b5/orjson-3.11.5-cp312-cp312-win32.whl", hash = "sha256:9172578c4eb09dbfcf1657d43198de59b6cef4054de385365060ed50c458ac98", size = 135255, upload-time = "2025-12-06T15:54:37.209Z" }, - { url = "https://files.pythonhosted.org/packages/d4/fb/f05646c43d5450492cb387de5549f6de90a71001682c17882d9f66476af5/orjson-3.11.5-cp312-cp312-win_amd64.whl", hash = "sha256:2b91126e7b470ff2e75746f6f6ee32b9ab67b7a93c8ba1d15d3a0caaf16ec875", size = 133252, upload-time = "2025-12-06T15:54:38.401Z" }, - { url = "https://files.pythonhosted.org/packages/dc/a6/7b8c0b26ba18c793533ac1cd145e131e46fcf43952aa94c109b5b913c1f0/orjson-3.11.5-cp312-cp312-win_arm64.whl", hash = "sha256:acbc5fac7e06777555b0722b8ad5f574739e99ffe99467ed63da98f97f9ca0fe", size = 126777, upload-time = "2025-12-06T15:54:39.515Z" }, - { url = "https://files.pythonhosted.org/packages/10/43/61a77040ce59f1569edf38f0b9faadc90c8cf7e9bec2e0df51d0132c6bb7/orjson-3.11.5-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629", size = 245271, upload-time = "2025-12-06T15:54:40.878Z" }, - { url = "https://files.pythonhosted.org/packages/55/f9/0f79be617388227866d50edd2fd320cb8fb94dc1501184bb1620981a0aba/orjson-3.11.5-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3", size = 129422, upload-time = "2025-12-06T15:54:42.403Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/42/f1bf1549b432d4a78bfa95735b79b5dac75b65b5bb815bba86ad406ead0a/orjson-3.11.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39", size = 132060, upload-time = "2025-12-06T15:54:43.531Z" }, - { url = "https://files.pythonhosted.org/packages/25/49/825aa6b929f1a6ed244c78acd7b22c1481fd7e5fda047dc8bf4c1a807eb6/orjson-3.11.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f", size = 130391, upload-time = "2025-12-06T15:54:45.059Z" }, - { url = "https://files.pythonhosted.org/packages/42/ec/de55391858b49e16e1aa8f0bbbb7e5997b7345d8e984a2dec3746d13065b/orjson-3.11.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51", size = 135964, upload-time = "2025-12-06T15:54:46.576Z" }, - { url = "https://files.pythonhosted.org/packages/1c/40/820bc63121d2d28818556a2d0a09384a9f0262407cf9fa305e091a8048df/orjson-3.11.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8", size = 139817, upload-time = "2025-12-06T15:54:48.084Z" }, - { url = "https://files.pythonhosted.org/packages/09/c7/3a445ca9a84a0d59d26365fd8898ff52bdfcdcb825bcc6519830371d2364/orjson-3.11.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706", size = 137336, upload-time = "2025-12-06T15:54:49.426Z" }, - { url = "https://files.pythonhosted.org/packages/9a/b3/dc0d3771f2e5d1f13368f56b339c6782f955c6a20b50465a91acb79fe961/orjson-3.11.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f", size = 138993, upload-time = "2025-12-06T15:54:50.939Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a2/65267e959de6abe23444659b6e19c888f242bf7725ff927e2292776f6b89/orjson-3.11.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863", size = 141070, upload-time = "2025-12-06T15:54:52.414Z" }, - { url = "https://files.pythonhosted.org/packages/63/c9/da44a321b288727a322c6ab17e1754195708786a04f4f9d2220a5076a649/orjson-3.11.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228", size = 413505, upload-time = "2025-12-06T15:54:53.67Z" }, - { url = "https://files.pythonhosted.org/packages/7f/17/68dc14fa7000eefb3d4d6d7326a190c99bb65e319f02747ef3ebf2452f12/orjson-3.11.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2", size = 151342, upload-time = "2025-12-06T15:54:55.113Z" }, - { url = "https://files.pythonhosted.org/packages/c4/c5/ccee774b67225bed630a57478529fc026eda33d94fe4c0eac8fe58d4aa52/orjson-3.11.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05", size = 141823, upload-time = "2025-12-06T15:54:56.331Z" }, - { url = "https://files.pythonhosted.org/packages/67/80/5d00e4155d0cd7390ae2087130637671da713959bb558db9bac5e6f6b042/orjson-3.11.5-cp313-cp313-win32.whl", hash = "sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef", size = 135236, upload-time = "2025-12-06T15:54:57.507Z" }, - { url = 
"https://files.pythonhosted.org/packages/95/fe/792cc06a84808dbdc20ac6eab6811c53091b42f8e51ecebf14b540e9cfe4/orjson-3.11.5-cp313-cp313-win_amd64.whl", hash = "sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583", size = 133167, upload-time = "2025-12-06T15:54:58.71Z" }, - { url = "https://files.pythonhosted.org/packages/46/2c/d158bd8b50e3b1cfdcf406a7e463f6ffe3f0d167b99634717acdaf5e299f/orjson-3.11.5-cp313-cp313-win_arm64.whl", hash = "sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287", size = 126712, upload-time = "2025-12-06T15:54:59.892Z" }, - { url = "https://files.pythonhosted.org/packages/c2/60/77d7b839e317ead7bb225d55bb50f7ea75f47afc489c81199befc5435b50/orjson-3.11.5-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e446a8ea0a4c366ceafc7d97067bfd55292969143b57e3c846d87fc701e797a0", size = 245252, upload-time = "2025-12-06T15:55:01.127Z" }, - { url = "https://files.pythonhosted.org/packages/f1/aa/d4639163b400f8044cef0fb9aa51b0337be0da3a27187a20d1166e742370/orjson-3.11.5-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:53deb5addae9c22bbe3739298f5f2196afa881ea75944e7720681c7080909a81", size = 129419, upload-time = "2025-12-06T15:55:02.723Z" }, - { url = "https://files.pythonhosted.org/packages/30/94/9eabf94f2e11c671111139edf5ec410d2f21e6feee717804f7e8872d883f/orjson-3.11.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd00d49d6063d2b8791da5d4f9d20539c5951f965e45ccf4e96d33505ce68f", size = 132050, upload-time = "2025-12-06T15:55:03.918Z" }, - { url = "https://files.pythonhosted.org/packages/3d/c8/ca10f5c5322f341ea9a9f1097e140be17a88f88d1cfdd29df522970d9744/orjson-3.11.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3fd15f9fc8c203aeceff4fda211157fad114dde66e92e24097b3647a08f4ee9e", size = 130370, upload-time = "2025-12-06T15:55:05.173Z" }, - { url = "https://files.pythonhosted.org/packages/25/d4/e96824476d361ee2edd5c6290ceb8d7edf88d81148a6ce172fc00278ca7f/orjson-3.11.5-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df95000fbe6777bf9820ae82ab7578e8662051bb5f83d71a28992f539d2cda7", size = 136012, upload-time = "2025-12-06T15:55:06.402Z" }, - { url = "https://files.pythonhosted.org/packages/85/8e/9bc3423308c425c588903f2d103cfcfe2539e07a25d6522900645a6f257f/orjson-3.11.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a8d676748fca47ade5bc3da7430ed7767afe51b2f8100e3cd65e151c0eaceb", size = 139809, upload-time = "2025-12-06T15:55:07.656Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3c/b404e94e0b02a232b957c54643ce68d0268dacb67ac33ffdee24008c8b27/orjson-3.11.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa0f513be38b40234c77975e68805506cad5d57b3dfd8fe3baa7f4f4051e15b4", size = 137332, upload-time = "2025-12-06T15:55:08.961Z" }, - { url = "https://files.pythonhosted.org/packages/51/30/cc2d69d5ce0ad9b84811cdf4a0cd5362ac27205a921da524ff42f26d65e0/orjson-3.11.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1863e75b92891f553b7922ce4ee10ed06db061e104f2b7815de80cdcb135ad", size = 138983, upload-time = "2025-12-06T15:55:10.595Z" }, - { url = "https://files.pythonhosted.org/packages/0e/87/de3223944a3e297d4707d2fe3b1ffb71437550e165eaf0ca8bbe43ccbcb1/orjson-3.11.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d4be86b58e9ea262617b8ca6251a2f0d63cc132a6da4b5fcc8e0a4128782c829", size = 141069, upload-time = 
"2025-12-06T15:55:11.832Z" }, - { url = "https://files.pythonhosted.org/packages/65/30/81d5087ae74be33bcae3ff2d80f5ccaa4a8fedc6d39bf65a427a95b8977f/orjson-3.11.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:b923c1c13fa02084eb38c9c065afd860a5cff58026813319a06949c3af5732ac", size = 413491, upload-time = "2025-12-06T15:55:13.314Z" }, - { url = "https://files.pythonhosted.org/packages/d0/6f/f6058c21e2fc1efaf918986dbc2da5cd38044f1a2d4b7b91ad17c4acf786/orjson-3.11.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:1b6bd351202b2cd987f35a13b5e16471cf4d952b42a73c391cc537974c43ef6d", size = 151375, upload-time = "2025-12-06T15:55:14.715Z" }, - { url = "https://files.pythonhosted.org/packages/54/92/c6921f17d45e110892899a7a563a925b2273d929959ce2ad89e2525b885b/orjson-3.11.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb150d529637d541e6af06bbe3d02f5498d628b7f98267ff87647584293ab439", size = 141850, upload-time = "2025-12-06T15:55:15.94Z" }, - { url = "https://files.pythonhosted.org/packages/88/86/cdecb0140a05e1a477b81f24739da93b25070ee01ce7f7242f44a6437594/orjson-3.11.5-cp314-cp314-win32.whl", hash = "sha256:9cc1e55c884921434a84a0c3dd2699eb9f92e7b441d7f53f3941079ec6ce7499", size = 135278, upload-time = "2025-12-06T15:55:17.202Z" }, - { url = "https://files.pythonhosted.org/packages/e4/97/b638d69b1e947d24f6109216997e38922d54dcdcdb1b11c18d7efd2d3c59/orjson-3.11.5-cp314-cp314-win_amd64.whl", hash = "sha256:a4f3cb2d874e03bc7767c8f88adaa1a9a05cecea3712649c3b58589ec7317310", size = 133170, upload-time = "2025-12-06T15:55:18.468Z" }, - { url = "https://files.pythonhosted.org/packages/8f/dd/f4fff4a6fe601b4f8f3ba3aa6da8ac33d17d124491a3b804c662a70e1636/orjson-3.11.5-cp314-cp314-win_arm64.whl", hash = "sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5", size = 126713, upload-time = "2025-12-06T15:55:19.738Z" }, +version = "3.11.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/70/a3/4e09c61a5f0c521cba0bb433639610ae037437669f1a4cbc93799e731d78/orjson-3.11.6.tar.gz", hash = "sha256:0a54c72259f35299fd033042367df781c2f66d10252955ca1efb7db309b954cb", size = 6175856, upload-time = "2026-01-29T15:13:07.942Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/3c/098ed0e49c565fdf1ccc6a75b190115d1ca74148bf5b6ab036554a550650/orjson-3.11.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a613fc37e007143d5b6286dccb1394cd114b07832417006a02b620ddd8279e37", size = 250411, upload-time = "2026-01-29T15:11:17.941Z" }, + { url = "https://files.pythonhosted.org/packages/15/7c/cb11a360fd228ceebade03b1e8e9e138dd4b1b3b11602b72dbdad915aded/orjson-3.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46ebee78f709d3ba7a65384cfe285bb0763157c6d2f836e7bde2f12d33a867a2", size = 138147, upload-time = "2026-01-29T15:11:19.659Z" }, + { url = "https://files.pythonhosted.org/packages/4e/4b/e57b5c45ffe69fbef7cbd56e9f40e2dc0d5de920caafefcc6981d1a7efc5/orjson-3.11.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a726fa86d2368cd57990f2bd95ef5495a6e613b08fc9585dfe121ec758fb08d1", size = 135110, upload-time = "2026-01-29T15:11:21.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6e/4f21c6256f8cee3c0c69926cf7ac821cfc36f218512eedea2e2dc4a490c8/orjson-3.11.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:150f12e59d6864197770c78126e1a6e07a3da73d1728731bf3bc1e8b96ffdbe6", size = 140995, upload-time 
= "2026-01-29T15:11:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d0/78/92c36205ba2f6094ba1eea60c8e646885072abe64f155196833988c14b74/orjson-3.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a2d9746a5b5ce20c0908ada451eb56da4ffa01552a50789a0354d8636a02953", size = 144435, upload-time = "2026-01-29T15:11:24.124Z" }, + { url = "https://files.pythonhosted.org/packages/4d/52/1b518d164005811eb3fea92650e76e7d9deadb0b41e92c483373b1e82863/orjson-3.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd177f5dd91666d31e9019f1b06d2fcdf8a409a1637ddcb5915085dede85680", size = 142734, upload-time = "2026-01-29T15:11:25.708Z" }, + { url = "https://files.pythonhosted.org/packages/4b/11/60ea7885a2b7c1bf60ed8b5982356078a73785bd3bab392041a5bcf8de7c/orjson-3.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d777ec41a327bd3b7de97ba7bce12cc1007815ca398e4e4de9ec56c022c090b", size = 145802, upload-time = "2026-01-29T15:11:26.917Z" }, + { url = "https://files.pythonhosted.org/packages/41/7f/15a927e7958fd4f7560fb6dbb9346bee44a168e40168093c46020d866098/orjson-3.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f3a135f83185c87c13ff231fcb7dbb2fa4332a376444bd65135b50ff4cc5265c", size = 147504, upload-time = "2026-01-29T15:11:28.07Z" }, + { url = "https://files.pythonhosted.org/packages/66/1f/cabb9132a533f4f913e29294d0a1ca818b1a9a52e990526fe3f7ddd75f1c/orjson-3.11.6-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:2a8eeed7d4544cf391a142b0dd06029dac588e96cc692d9ab1c3f05b1e57c7f6", size = 421408, upload-time = "2026-01-29T15:11:29.314Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b9/09bda9257a982e300313e4a9fc9b9c3aaff424d07bcf765bf045e4e3ed03/orjson-3.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9d576865a21e5cc6695be8fb78afc812079fd361ce6a027a7d41561b61b33a90", size = 155801, upload-time = "2026-01-29T15:11:30.575Z" }, + { url = "https://files.pythonhosted.org/packages/98/19/4e40ea3e5f4c6a8d51f31fd2382351ee7b396fecca915b17cd1af588175b/orjson-3.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:925e2df51f60aa50f8797830f2adfc05330425803f4105875bb511ced98b7f89", size = 147647, upload-time = "2026-01-29T15:11:31.856Z" }, + { url = "https://files.pythonhosted.org/packages/5a/73/ef4bd7dd15042cf33a402d16b87b9e969e71edb452b63b6e2b05025d1f7d/orjson-3.11.6-cp310-cp310-win32.whl", hash = "sha256:09dded2de64e77ac0b312ad59f35023548fb87393a57447e1bb36a26c181a90f", size = 139770, upload-time = "2026-01-29T15:11:33.031Z" }, + { url = "https://files.pythonhosted.org/packages/b4/ac/daab6e10467f7fffd7081ba587b492505b49313130ff5446a6fe28bf076e/orjson-3.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:3a63b5e7841ca8635214c6be7c0bf0246aa8c5cd4ef0c419b14362d0b2fb13de", size = 136783, upload-time = "2026-01-29T15:11:34.686Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fd/d6b0a36854179b93ed77839f107c4089d91cccc9f9ba1b752b6e3bac5f34/orjson-3.11.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e259e85a81d76d9665f03d6129e09e4435531870de5961ddcd0bf6e3a7fde7d7", size = 250029, upload-time = "2026-01-29T15:11:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/a3/bb/22902619826641cf3b627c24aab62e2ad6b571bdd1d34733abb0dd57f67a/orjson-3.11.6-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:52263949f41b4a4822c6b1353bcc5ee2f7109d53a3b493501d3369d6d0e7937a", size = 134518, upload-time = "2026-01-29T15:11:37.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/90/7a818da4bba1de711a9653c420749c0ac95ef8f8651cbc1dca551f462fe0/orjson-3.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6439e742fa7834a24698d358a27346bb203bff356ae0402e7f5df8f749c621a8", size = 137917, upload-time = "2026-01-29T15:11:38.511Z" }, + { url = "https://files.pythonhosted.org/packages/59/0f/02846c1cac8e205cb3822dd8aa8f9114acda216f41fd1999ace6b543418d/orjson-3.11.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b81ffd68f084b4e993e3867acb554a049fa7787cc8710bbcc1e26965580d99be", size = 134923, upload-time = "2026-01-29T15:11:39.711Z" }, + { url = "https://files.pythonhosted.org/packages/94/cf/aeaf683001b474bb3c3c757073a4231dfdfe8467fceaefa5bfd40902c99f/orjson-3.11.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5a5468e5e60f7ef6d7f9044b06c8f94a3c56ba528c6e4f7f06ae95164b595ec", size = 140752, upload-time = "2026-01-29T15:11:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fe/dad52d8315a65f084044a0819d74c4c9daf9ebe0681d30f525b0d29a31f0/orjson-3.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72c5005eb45bd2535632d4f3bec7ad392832cfc46b62a3021da3b48a67734b45", size = 144201, upload-time = "2026-01-29T15:11:42.537Z" }, + { url = "https://files.pythonhosted.org/packages/36/bc/ab070dd421565b831801077f1e390c4d4af8bfcecafc110336680a33866b/orjson-3.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b14dd49f3462b014455a28a4d810d3549bf990567653eb43765cd847df09145", size = 142380, upload-time = "2026-01-29T15:11:44.309Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d8/4b581c725c3a308717f28bf45a9fdac210bca08b67e8430143699413ff06/orjson-3.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bb2c1ea30ef302f0f89f9bf3e7f9ab5e2af29dc9f80eb87aa99788e4e2d65", size = 145582, upload-time = "2026-01-29T15:11:45.506Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a2/09aab99b39f9a7f175ea8fa29adb9933a3d01e7d5d603cdee7f1c40c8da2/orjson-3.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:825e0a85d189533c6bff7e2fc417a28f6fcea53d27125c4551979aecd6c9a197", size = 147270, upload-time = "2026-01-29T15:11:46.782Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2f/5ef8eaf7829dc50da3bf497c7775b21ee88437bc8c41f959aa3504ca6631/orjson-3.11.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:b04575417a26530637f6ab4b1f7b4f666eb0433491091da4de38611f97f2fcf3", size = 421222, upload-time = "2026-01-29T15:11:48.106Z" }, + { url = "https://files.pythonhosted.org/packages/3b/b0/dd6b941294c2b5b13da5fdc7e749e58d0c55a5114ab37497155e83050e95/orjson-3.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b83eb2e40e8c4da6d6b340ee6b1d6125f5195eb1b0ebb7eac23c6d9d4f92d224", size = 155562, upload-time = "2026-01-29T15:11:49.408Z" }, + { url = "https://files.pythonhosted.org/packages/8e/09/43924331a847476ae2f9a16bd6d3c9dab301265006212ba0d3d7fd58763a/orjson-3.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1f42da604ee65a6b87eef858c913ce3e5777872b19321d11e6fc6d21de89b64f", size = 147432, upload-time = "2026-01-29T15:11:50.635Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e9/d9865961081816909f6b49d880749dbbd88425afd7c5bbce0549e2290d77/orjson-3.11.6-cp311-cp311-win32.whl", hash = "sha256:5ae45df804f2d344cffb36c43fdf03c82fb6cd247f5faa41e21891b40dfbf733", size = 139623, upload-time = "2026-01-29T15:11:51.82Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/f9/6836edb92f76eec1082919101eb1145d2f9c33c8f2c5e6fa399b82a2aaa8/orjson-3.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:f4295948d65ace0a2d8f2c4ccc429668b7eb8af547578ec882e16bf79b0050b2", size = 136647, upload-time = "2026-01-29T15:11:53.454Z" }, + { url = "https://files.pythonhosted.org/packages/b3/0c/4954082eea948c9ae52ee0bcbaa2f99da3216a71bcc314ab129bde22e565/orjson-3.11.6-cp311-cp311-win_arm64.whl", hash = "sha256:314e9c45e0b81b547e3a1cfa3df3e07a815821b3dac9fe8cb75014071d0c16a4", size = 135327, upload-time = "2026-01-29T15:11:56.616Z" }, + { url = "https://files.pythonhosted.org/packages/14/ba/759f2879f41910b7e5e0cdbd9cf82a4f017c527fb0e972e9869ca7fe4c8e/orjson-3.11.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6f03f30cd8953f75f2a439070c743c7336d10ee940da918d71c6f3556af3ddcf", size = 249988, upload-time = "2026-01-29T15:11:58.294Z" }, + { url = "https://files.pythonhosted.org/packages/f0/70/54cecb929e6c8b10104fcf580b0cc7dc551aa193e83787dd6f3daba28bb5/orjson-3.11.6-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:af44baae65ef386ad971469a8557a0673bb042b0b9fd4397becd9c2dfaa02588", size = 134445, upload-time = "2026-01-29T15:11:59.819Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6f/ec0309154457b9ba1ad05f11faa4441f76037152f75e1ac577db3ce7ca96/orjson-3.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c310a48542094e4f7dbb6ac076880994986dda8ca9186a58c3cb70a3514d3231", size = 137708, upload-time = "2026-01-29T15:12:01.488Z" }, + { url = "https://files.pythonhosted.org/packages/20/52/3c71b80840f8bab9cb26417302707b7716b7d25f863f3a541bcfa232fe6e/orjson-3.11.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8dfa7a5d387f15ecad94cb6b2d2d5f4aeea64efd8d526bfc03c9812d01e1cc0", size = 134798, upload-time = "2026-01-29T15:12:02.705Z" }, + { url = "https://files.pythonhosted.org/packages/30/51/b490a43b22ff736282360bd02e6bded455cf31dfc3224e01cd39f919bbd2/orjson-3.11.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba8daee3e999411b50f8b50dbb0a3071dd1845f3f9a1a0a6fa6de86d1689d84d", size = 140839, upload-time = "2026-01-29T15:12:03.956Z" }, + { url = "https://files.pythonhosted.org/packages/95/bc/4bcfe4280c1bc63c5291bb96f98298845b6355da2226d3400e17e7b51e53/orjson-3.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f89d104c974eafd7436d7a5fdbc57f7a1e776789959a2f4f1b2eab5c62a339f4", size = 144080, upload-time = "2026-01-29T15:12:05.151Z" }, + { url = "https://files.pythonhosted.org/packages/01/74/22970f9ead9ab1f1b5f8c227a6c3aa8d71cd2c5acd005868a1d44f2362fa/orjson-3.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2e2e2456788ca5ea75616c40da06fc885a7dc0389780e8a41bf7c5389ba257b", size = 142435, upload-time = "2026-01-29T15:12:06.641Z" }, + { url = "https://files.pythonhosted.org/packages/29/34/d564aff85847ab92c82ee43a7a203683566c2fca0723a5f50aebbe759603/orjson-3.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a42efebc45afabb1448001e90458c4020d5c64fbac8a8dc4045b777db76cb5a", size = 145631, upload-time = "2026-01-29T15:12:08.351Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ef/016957a3890752c4aa2368326ea69fa53cdc1fdae0a94a542b6410dbdf52/orjson-3.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71b7cbef8471324966c3738c90ba38775563ef01b512feb5ad4805682188d1b9", size = 147058, upload-time = 
"2026-01-29T15:12:10.023Z" }, + { url = "https://files.pythonhosted.org/packages/56/cc/9a899c3972085645b3225569f91a30e221f441e5dc8126e6d060b971c252/orjson-3.11.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:f8515e5910f454fe9a8e13c2bb9dc4bae4c1836313e967e72eb8a4ad874f0248", size = 421161, upload-time = "2026-01-29T15:12:11.308Z" }, + { url = "https://files.pythonhosted.org/packages/21/a8/767d3fbd6d9b8fdee76974db40619399355fd49bf91a6dd2c4b6909ccf05/orjson-3.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:300360edf27c8c9bf7047345a94fddf3a8b8922df0ff69d71d854a170cb375cf", size = 155757, upload-time = "2026-01-29T15:12:12.776Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0b/205cd69ac87e2272e13ef3f5f03a3d4657e317e38c1b08aaa2ef97060bbc/orjson-3.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:caaed4dad39e271adfadc106fab634d173b2bb23d9cf7e67bd645f879175ebfc", size = 147446, upload-time = "2026-01-29T15:12:14.166Z" }, + { url = "https://files.pythonhosted.org/packages/de/c5/dd9f22aa9f27c54c7d05cc32f4580c9ac9b6f13811eeb81d6c4c3f50d6b1/orjson-3.11.6-cp312-cp312-win32.whl", hash = "sha256:955368c11808c89793e847830e1b1007503a5923ddadc108547d3b77df761044", size = 139717, upload-time = "2026-01-29T15:12:15.7Z" }, + { url = "https://files.pythonhosted.org/packages/23/a1/e62fc50d904486970315a1654b8cfb5832eb46abb18cd5405118e7e1fc79/orjson-3.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:2c68de30131481150073d90a5d227a4a421982f42c025ecdfb66157f9579e06f", size = 136711, upload-time = "2026-01-29T15:12:17.055Z" }, + { url = "https://files.pythonhosted.org/packages/04/3d/b4fefad8bdf91e0fe212eb04975aeb36ea92997269d68857efcc7eb1dda3/orjson-3.11.6-cp312-cp312-win_arm64.whl", hash = "sha256:65dfa096f4e3a5e02834b681f539a87fbe85adc82001383c0db907557f666bfc", size = 135212, upload-time = "2026-01-29T15:12:18.3Z" }, + { url = "https://files.pythonhosted.org/packages/ae/45/d9c71c8c321277bc1ceebf599bc55ba826ae538b7c61f287e9a7e71bd589/orjson-3.11.6-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e4ae1670caabb598a88d385798692ce2a1b2f078971b3329cfb85253c6097f5b", size = 249828, upload-time = "2026-01-29T15:12:20.14Z" }, + { url = "https://files.pythonhosted.org/packages/ac/7e/4afcf4cfa9c2f93846d70eee9c53c3c0123286edcbeb530b7e9bd2aea1b2/orjson-3.11.6-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:2c6b81f47b13dac2caa5d20fbc953c75eb802543abf48403a4703ed3bff225f0", size = 134339, upload-time = "2026-01-29T15:12:22.01Z" }, + { url = "https://files.pythonhosted.org/packages/40/10/6d2b8a064c8d2411d3d0ea6ab43125fae70152aef6bea77bb50fa54d4097/orjson-3.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:647d6d034e463764e86670644bdcaf8e68b076e6e74783383b01085ae9ab334f", size = 137662, upload-time = "2026-01-29T15:12:23.307Z" }, + { url = "https://files.pythonhosted.org/packages/5a/50/5804ea7d586baf83ee88969eefda97a24f9a5bdba0727f73e16305175b26/orjson-3.11.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8523b9cc4ef174ae52414f7699e95ee657c16aa18b3c3c285d48d7966cce9081", size = 134626, upload-time = "2026-01-29T15:12:25.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/2e/f0492ed43e376722bb4afd648e06cc1e627fc7ec8ff55f6ee739277813ea/orjson-3.11.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:313dfd7184cde50c733fc0d5c8c0e2f09017b573afd11dc36bd7476b30b4cb17", size = 140873, upload-time = "2026-01-29T15:12:26.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/15/6f874857463421794a303a39ac5494786ad46a4ab46d92bda6705d78c5aa/orjson-3.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:905ee036064ff1e1fd1fb800055ac477cdcb547a78c22c1bc2bbf8d5d1a6fb42", size = 144044, upload-time = "2026-01-29T15:12:28.082Z" }, + { url = "https://files.pythonhosted.org/packages/d2/c7/b7223a3a70f1d0cc2d86953825de45f33877ee1b124a91ca1f79aa6e643f/orjson-3.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce374cb98411356ba906914441fc993f271a7a666d838d8de0e0900dd4a4bc12", size = 142396, upload-time = "2026-01-29T15:12:30.529Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/aa1b6d3ad3cd80f10394134f73ae92a1d11fdbe974c34aa199cc18bb5fcf/orjson-3.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cded072b9f65fcfd188aead45efa5bd528ba552add619b3ad2a81f67400ec450", size = 145600, upload-time = "2026-01-29T15:12:31.848Z" }, + { url = "https://files.pythonhosted.org/packages/f6/cf/e4aac5a46cbd39d7e769ef8650efa851dfce22df1ba97ae2b33efe893b12/orjson-3.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ab85bdbc138e1f73a234db6bb2e4cc1f0fcec8f4bd2bd2430e957a01aadf746", size = 146967, upload-time = "2026-01-29T15:12:33.203Z" }, + { url = "https://files.pythonhosted.org/packages/0b/04/975b86a4bcf6cfeda47aad15956d52fbeda280811206e9967380fa9355c8/orjson-3.11.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:351b96b614e3c37a27b8ab048239ebc1e0be76cc17481a430d70a77fb95d3844", size = 421003, upload-time = "2026-01-29T15:12:35.097Z" }, + { url = "https://files.pythonhosted.org/packages/28/d1/0369d0baf40eea5ff2300cebfe209883b2473ab4aa4c4974c8bd5ee42bb2/orjson-3.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f9959c85576beae5cdcaaf39510b15105f1ee8b70d5dacd90152617f57be8c83", size = 155695, upload-time = "2026-01-29T15:12:36.589Z" }, + { url = "https://files.pythonhosted.org/packages/ab/1f/d10c6d6ae26ff1d7c3eea6fd048280ef2e796d4fb260c5424fd021f68ecf/orjson-3.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75682d62b1b16b61a30716d7a2ec1f4c36195de4a1c61f6665aedd947b93a5d5", size = 147392, upload-time = "2026-01-29T15:12:37.876Z" }, + { url = "https://files.pythonhosted.org/packages/8d/43/7479921c174441a0aa5277c313732e20713c0969ac303be9f03d88d3db5d/orjson-3.11.6-cp313-cp313-win32.whl", hash = "sha256:40dc277999c2ef227dcc13072be879b4cfd325502daeb5c35ed768f706f2bf30", size = 139718, upload-time = "2026-01-29T15:12:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/88/bc/9ffe7dfbf8454bc4e75bb8bf3a405ed9e0598df1d3535bb4adcd46be07d0/orjson-3.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:f0f6e9f8ff7905660bc3c8a54cd4a675aa98f7f175cf00a59815e2ff42c0d916", size = 136635, upload-time = "2026-01-29T15:12:40.593Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/51fa90b451470447ea5023b20d83331ec741ae28d1e6d8ed547c24e7de14/orjson-3.11.6-cp313-cp313-win_arm64.whl", hash = "sha256:1608999478664de848e5900ce41f25c4ecdfc4beacbc632b6fd55e1a586e5d38", size = 135175, upload-time = "2026-01-29T15:12:41.997Z" }, + { url = "https://files.pythonhosted.org/packages/31/9f/46ca908abaeeec7560638ff20276ab327b980d73b3cc2f5b205b4a1c60b3/orjson-3.11.6-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6026db2692041d2a23fe2545606df591687787825ad5821971ef0974f2c47630", size = 249823, upload-time = "2026-01-29T15:12:43.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/78/ca478089818d18c9cd04f79c43f74ddd031b63c70fa2a946eb5e85414623/orjson-3.11.6-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:132b0ab2e20c73afa85cf142e547511feb3d2f5b7943468984658f3952b467d4", size = 134328, upload-time = "2026-01-29T15:12:45.171Z" }, + { url = "https://files.pythonhosted.org/packages/39/5e/cbb9d830ed4e47f4375ad8eef8e4fff1bf1328437732c3809054fc4e80be/orjson-3.11.6-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b376fb05f20a96ec117d47987dd3b39265c635725bda40661b4c5b73b77b5fde", size = 137651, upload-time = "2026-01-29T15:12:46.602Z" }, + { url = "https://files.pythonhosted.org/packages/7c/3a/35df6558c5bc3a65ce0961aefee7f8364e59af78749fc796ea255bfa0cf5/orjson-3.11.6-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:954dae4e080574672a1dfcf2a840eddef0f27bd89b0e94903dd0824e9c1db060", size = 134596, upload-time = "2026-01-29T15:12:47.95Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8e/3d32dd7b7f26a19cc4512d6ed0ae3429567c71feef720fe699ff43c5bc9e/orjson-3.11.6-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe515bb89d59e1e4b48637a964f480b35c0a2676de24e65e55310f6016cca7ce", size = 140923, upload-time = "2026-01-29T15:12:49.333Z" }, + { url = "https://files.pythonhosted.org/packages/6c/9c/1efbf5c99b3304f25d6f0d493a8d1492ee98693637c10ce65d57be839d7b/orjson-3.11.6-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:380f9709c275917af28feb086813923251e11ee10687257cd7f1ea188bcd4485", size = 144068, upload-time = "2026-01-29T15:12:50.927Z" }, + { url = "https://files.pythonhosted.org/packages/82/83/0d19eeb5be797de217303bbb55dde58dba26f996ed905d301d98fd2d4637/orjson-3.11.6-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8173e0d3f6081e7034c51cf984036d02f6bab2a2126de5a759d79f8e5a140e7", size = 142493, upload-time = "2026-01-29T15:12:52.432Z" }, + { url = "https://files.pythonhosted.org/packages/32/a7/573fec3df4dc8fc259b7770dc6c0656f91adce6e19330c78d23f87945d1e/orjson-3.11.6-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dddf9ba706294906c56ef5150a958317b09aa3a8a48df1c52ccf22ec1907eac", size = 145616, upload-time = "2026-01-29T15:12:53.903Z" }, + { url = "https://files.pythonhosted.org/packages/c2/0e/23551b16f21690f7fd5122e3cf40fdca5d77052a434d0071990f97f5fe2f/orjson-3.11.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cbae5c34588dc79938dffb0b6fbe8c531f4dc8a6ad7f39759a9eb5d2da405ef2", size = 146951, upload-time = "2026-01-29T15:12:55.698Z" }, + { url = "https://files.pythonhosted.org/packages/b8/63/5e6c8f39805c39123a18e412434ea364349ee0012548d08aa586e2bd6aa9/orjson-3.11.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:f75c318640acbddc419733b57f8a07515e587a939d8f54363654041fd1f4e465", size = 421024, upload-time = "2026-01-29T15:12:57.434Z" }, + { url = "https://files.pythonhosted.org/packages/1d/4d/724975cf0087f6550bd01fd62203418afc0ea33fd099aed318c5bcc52df8/orjson-3.11.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e0ab8d13aa2a3e98b4a43487c9205b2c92c38c054b4237777484d503357c8437", size = 155774, upload-time = "2026-01-29T15:12:59.397Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a3/f4c4e3f46b55db29e0a5f20493b924fc791092d9a03ff2068c9fe6c1002f/orjson-3.11.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f884c7fb1020d44612bd7ac0db0babba0e2f78b68d9a650c7959bf99c783773f", size = 147393, upload-time = "2026-01-29T15:13:00.769Z" }, + { url 
= "https://files.pythonhosted.org/packages/ee/86/6f5529dd27230966171ee126cecb237ed08e9f05f6102bfaf63e5b32277d/orjson-3.11.6-cp314-cp314-win32.whl", hash = "sha256:8d1035d1b25732ec9f971e833a3e299d2b1a330236f75e6fd945ad982c76aaf3", size = 139760, upload-time = "2026-01-29T15:13:02.173Z" }, + { url = "https://files.pythonhosted.org/packages/d3/b5/91ae7037b2894a6b5002fb33f4fbccec98424a928469835c3837fbb22a9b/orjson-3.11.6-cp314-cp314-win_amd64.whl", hash = "sha256:931607a8865d21682bb72de54231655c86df1870502d2962dbfd12c82890d077", size = 136633, upload-time = "2026-01-29T15:13:04.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/74/f473a3ec7a0a7ebc825ca8e3c86763f7d039f379860c81ba12dcdd456547/orjson-3.11.6-cp314-cp314-win_arm64.whl", hash = "sha256:fe71f6b283f4f1832204ab8235ce07adad145052614f77c876fcf0dac97bc06f", size = 135168, upload-time = "2026-01-29T15:13:05.932Z" }, ] [[package]] @@ -4609,11 +4604,11 @@ wheels = [ [[package]] name = "pathspec" -version = "1.0.3" +version = "1.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/b2/bb8e495d5262bfec41ab5cb18f522f1012933347fb5d9e62452d446baca2/pathspec-1.0.3.tar.gz", hash = "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d", size = 130841, upload-time = "2026-01-09T15:46:46.009Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/2b/121e912bd60eebd623f873fd090de0e84f322972ab25a7f9044c056804ed/pathspec-1.0.3-py3-none-any.whl", hash = "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c", size = 55021, upload-time = "2026-01-09T15:46:44.652Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, ] [[package]] @@ -4968,30 +4963,30 @@ wheels = [ [[package]] name = "psutil" -version = "7.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/cb/09e5184fb5fc0358d110fc3ca7f6b1d033800734d34cac10f4136cfac10e/psutil-7.2.1.tar.gz", hash = "sha256:f7583aec590485b43ca601dd9cea0dcd65bd7bb21d30ef4ddbf4ea6b5ed1bdd3", size = 490253, upload-time = "2025-12-29T08:26:00.169Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/8e/f0c242053a368c2aa89584ecd1b054a18683f13d6e5a318fc9ec36582c94/psutil-7.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9f33bb525b14c3ea563b2fd521a84d2fa214ec59e3e6a2858f78d0844dd60d", size = 129624, upload-time = "2025-12-29T08:26:04.255Z" }, - { url = "https://files.pythonhosted.org/packages/26/97/a58a4968f8990617decee234258a2b4fc7cd9e35668387646c1963e69f26/psutil-7.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:81442dac7abfc2f4f4385ea9e12ddf5a796721c0f6133260687fec5c3780fa49", size = 130132, upload-time = "2025-12-29T08:26:06.228Z" }, - { url = "https://files.pythonhosted.org/packages/db/6d/ed44901e830739af5f72a85fa7ec5ff1edea7f81bfbf4875e409007149bd/psutil-7.2.1-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ea46c0d060491051d39f0d2cff4f98d5c72b288289f57a21556cc7d504db37fc", size = 180612, upload-time = "2025-12-29T08:26:08.276Z" }, - { url = "https://files.pythonhosted.org/packages/c7/65/b628f8459bca4efbfae50d4bf3feaab803de9a160b9d5f3bd9295a33f0c2/psutil-7.2.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35630d5af80d5d0d49cfc4d64c1c13838baf6717a13effb35869a5919b854cdf", size = 183201, upload-time = "2025-12-29T08:26:10.622Z" }, - { url = "https://files.pythonhosted.org/packages/fb/23/851cadc9764edcc18f0effe7d0bf69f727d4cf2442deb4a9f78d4e4f30f2/psutil-7.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:923f8653416604e356073e6e0bccbe7c09990acef442def2f5640dd0faa9689f", size = 139081, upload-time = "2025-12-29T08:26:12.483Z" }, - { url = "https://files.pythonhosted.org/packages/59/82/d63e8494ec5758029f31c6cb06d7d161175d8281e91d011a4a441c8a43b5/psutil-7.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cfbe6b40ca48019a51827f20d830887b3107a74a79b01ceb8cc8de4ccb17b672", size = 134767, upload-time = "2025-12-29T08:26:14.528Z" }, - { url = "https://files.pythonhosted.org/packages/05/c2/5fb764bd61e40e1fe756a44bd4c21827228394c17414ade348e28f83cd79/psutil-7.2.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:494c513ccc53225ae23eec7fe6e1482f1b8a44674241b54561f755a898650679", size = 129716, upload-time = "2025-12-29T08:26:16.017Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d2/935039c20e06f615d9ca6ca0ab756cf8408a19d298ffaa08666bc18dc805/psutil-7.2.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3fce5f92c22b00cdefd1645aa58ab4877a01679e901555067b1bd77039aa589f", size = 130133, upload-time = "2025-12-29T08:26:18.009Z" }, - { url = "https://files.pythonhosted.org/packages/77/69/19f1eb0e01d24c2b3eacbc2f78d3b5add8a89bf0bb69465bc8d563cc33de/psutil-7.2.1-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93f3f7b0bb07711b49626e7940d6fe52aa9940ad86e8f7e74842e73189712129", size = 181518, upload-time = "2025-12-29T08:26:20.241Z" }, - { url = "https://files.pythonhosted.org/packages/e1/6d/7e18b1b4fa13ad370787626c95887b027656ad4829c156bb6569d02f3262/psutil-7.2.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d34d2ca888208eea2b5c68186841336a7f5e0b990edec929be909353a202768a", size = 184348, upload-time = "2025-12-29T08:26:22.215Z" }, - { url = "https://files.pythonhosted.org/packages/98/60/1672114392dd879586d60dd97896325df47d9a130ac7401318005aab28ec/psutil-7.2.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2ceae842a78d1603753561132d5ad1b2f8a7979cb0c283f5b52fb4e6e14b1a79", size = 140400, upload-time = "2025-12-29T08:26:23.993Z" }, - { url = "https://files.pythonhosted.org/packages/fb/7b/d0e9d4513c46e46897b46bcfc410d51fc65735837ea57a25170f298326e6/psutil-7.2.1-cp314-cp314t-win_arm64.whl", hash = "sha256:08a2f175e48a898c8eb8eace45ce01777f4785bc744c90aa2cc7f2fa5462a266", size = 135430, upload-time = "2025-12-29T08:26:25.999Z" }, - { url = "https://files.pythonhosted.org/packages/c5/cf/5180eb8c8bdf6a503c6919f1da28328bd1e6b3b1b5b9d5b01ae64f019616/psutil-7.2.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2e953fcfaedcfbc952b44744f22d16575d3aa78eb4f51ae74165b4e96e55f42", size = 128137, upload-time = "2025-12-29T08:26:27.759Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2c/78e4a789306a92ade5000da4f5de3255202c534acdadc3aac7b5458fadef/psutil-7.2.1-cp36-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:05cc68dbb8c174828624062e73078e7e35406f4ca2d0866c272c2410d8ef06d1", size = 128947, upload-time = "2025-12-29T08:26:29.548Z" }, - { url = "https://files.pythonhosted.org/packages/29/f8/40e01c350ad9a2b3cb4e6adbcc8a83b17ee50dd5792102b6142385937db5/psutil-7.2.1-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e38404ca2bb30ed7267a46c02f06ff842e92da3bb8c5bfdadbd35a5722314d8", size = 154694, upload-time = "2025-12-29T08:26:32.147Z" }, - { url = "https://files.pythonhosted.org/packages/06/e4/b751cdf839c011a9714a783f120e6a86b7494eb70044d7d81a25a5cd295f/psutil-7.2.1-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab2b98c9fc19f13f59628d94df5cc4cc4844bc572467d113a8b517d634e362c6", size = 156136, upload-time = "2025-12-29T08:26:34.079Z" }, - { url = "https://files.pythonhosted.org/packages/44/ad/bbf6595a8134ee1e94a4487af3f132cef7fce43aef4a93b49912a48c3af7/psutil-7.2.1-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f78baafb38436d5a128f837fab2d92c276dfb48af01a240b861ae02b2413ada8", size = 148108, upload-time = "2025-12-29T08:26:36.225Z" }, - { url = "https://files.pythonhosted.org/packages/1c/15/dd6fd869753ce82ff64dcbc18356093471a5a5adf4f77ed1f805d473d859/psutil-7.2.1-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:99a4cd17a5fdd1f3d014396502daa70b5ec21bf4ffe38393e152f8e449757d67", size = 147402, upload-time = "2025-12-29T08:26:39.21Z" }, - { url = "https://files.pythonhosted.org/packages/34/68/d9317542e3f2b180c4306e3f45d3c922d7e86d8ce39f941bb9e2e9d8599e/psutil-7.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:b1b0671619343aa71c20ff9767eced0483e4fc9e1f489d50923738caf6a03c17", size = 136938, upload-time = "2025-12-29T08:26:41.036Z" }, - { url = "https://files.pythonhosted.org/packages/3e/73/2ce007f4198c80fcf2cb24c169884f833fe93fbc03d55d302627b094ee91/psutil-7.2.1-cp37-abi3-win_arm64.whl", hash = "sha256:0d67c1822c355aa6f7314d92018fb4268a76668a536f133599b91edd48759442", size = 133836, upload-time = "2025-12-29T08:26:43.086Z" }, +version = "7.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, + { url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" }, + { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, + { url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, + { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, + { url = "https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, + { url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, ] [[package]] @@ -8014,11 +8009,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.4.0" +version = "0.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/0a/dc5110cc99c39df65bac29229c4b637a8304e0914850348d98974c8ecfff/wcwidth-0.4.0.tar.gz", hash = "sha256:46478e02cf7149ba150fb93c39880623ee7e5181c64eda167b6a1de51b7a7ba1", size = 237625, upload-time = "2026-01-26T02:35:58.844Z" } +sdist = { url = "https://files.pythonhosted.org/packages/64/6e/62daec357285b927e82263a81f3b4c1790215bc77c42530ce4a69d501a43/wcwidth-0.5.0.tar.gz", hash = "sha256:f89c103c949a693bf563377b2153082bf58e309919dfb7f27b04d862a0089333", size = 246585, upload-time = "2026-01-27T01:31:44.942Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/f6/da704c5e77281d71723bffbd926b754c0efd57cbcd02e74c2ca374c14cef/wcwidth-0.4.0-py3-none-any.whl", hash = "sha256:8af2c81174b3aa17adf05058c543c267e4e5b6767a28e31a673a658c1d766783", size = 88216, upload-time = "2026-01-26T02:35:57.461Z" }, + { url = "https://files.pythonhosted.org/packages/f2/3e/45583b67c2ff08ad5a582d316fcb2f11d6cf0a50c7707ac09d212d25bc98/wcwidth-0.5.0-py3-none-any.whl", hash = "sha256:1efe1361b83b0ff7877b81ba57c8562c99cf812158b778988ce17ec061095695", size = 93772, upload-time = "2026-01-27T01:31:43.432Z" }, ] [[package]]