diff --git a/python-objectstore-client/pyproject.toml b/python-objectstore-client/pyproject.toml index 8c798e0a..ef496bbe 100644 --- a/python-objectstore-client/pyproject.toml +++ b/python-objectstore-client/pyproject.toml @@ -4,7 +4,11 @@ version = "0.1.0" description = "Python client for the Sentry Objectstore service" readme = "README.md" requires-python = ">=3.13.1" -dependencies = [] +dependencies = [ + "sentry-sdk>=2.42.1", + "urllib3>=2.5.0", + "zstandard>=0.18.0", +] [build-system] requires = ["uv_build"] diff --git a/python-objectstore-client/src/objectstore_client/client.py b/python-objectstore-client/src/objectstore_client/client.py new file mode 100644 index 00000000..a11f1ef0 --- /dev/null +++ b/python-objectstore-client/src/objectstore_client/client.py @@ -0,0 +1,239 @@ +from __future__ import annotations + +from io import BytesIO +from typing import IO, Literal, NamedTuple, NotRequired, Self, TypedDict, cast +from urllib.parse import urlencode + +import sentry_sdk +import urllib3 +import zstandard +from urllib3.connectionpool import HTTPConnectionPool + +from objectstore_client.metadata import ( + HEADER_EXPIRATION, + HEADER_META_PREFIX, + Compression, + ExpirationPolicy, + Metadata, + format_expiration, +) +from objectstore_client.metrics import ( + MetricsBackend, + NoOpMetricsBackend, + measure_storage_operation, +) + +Permission = Literal["read", "write"] + + +class Scope(TypedDict): + organization: int + project: NotRequired[int] + + +class GetResult(NamedTuple): + metadata: Metadata + payload: IO[bytes] + + +class ClientBuilder: + def __init__( + self, + objectstore_base_url: str, + usecase: str, + metrics_backend: MetricsBackend | None = None, + propagate_traces: bool = False, + ): + self._base_url = objectstore_base_url + self._usecase = usecase + self._default_compression: Compression = "zstd" + self._propagate_traces = propagate_traces + self._metrics_backend = metrics_backend or NoOpMetricsBackend() + + def _make_client(self, scope: str) 
-> Client: + pool = urllib3.connectionpool.connection_from_url(self._base_url) + return Client( + pool, + self._default_compression, + self._usecase, + scope, + self._propagate_traces, + self._metrics_backend, + ) + + def default_compression(self, default_compression: Compression) -> Self: + self._default_compression = default_compression + return self + + def for_organization(self, organization_id: int) -> Client: + return self._make_client(f"org.{organization_id}") + + def for_project(self, organization_id: int, project_id: int) -> Client: + return self._make_client(f"org.{organization_id}/proj.{project_id}") + + +class Client: + _default_compression: Compression + + def __init__( + self, + pool: HTTPConnectionPool, + default_compression: Compression, + usecase: str, + scope: str, + propagate_traces: bool, + metrics_backend: MetricsBackend, + ): + self._pool = pool + self._default_compression = default_compression + self._usecase = usecase + self._scope = scope + self._propagate_traces = propagate_traces + self._metrics_backend = metrics_backend + + def _make_headers(self) -> dict[str, str]: + if self._propagate_traces: + return dict(sentry_sdk.get_current_scope().iter_trace_propagation_headers()) + return {} + + def _make_url(self, id: str | None, full: bool = False) -> str: + base_path = f"/v1/{id}" if id else "/v1/" + qs = urlencode({"usecase": self._usecase, "scope": self._scope}) + if full: + return f"http://{self._pool.host}:{self._pool.port}{base_path}?{qs}" + else: + return f"{base_path}?{qs}" + + def put( + self, + contents: bytes | IO[bytes], + id: str | None = None, + compression: Compression | Literal["none"] | None = None, + metadata: dict[str, str] | None = None, + expiration_policy: ExpirationPolicy | None = None, + ) -> str: + """ + Uploads the given `contents` to blob storage. + + If no `id` is provided, one will be automatically generated and returned + from this function. 
+ + The client will select the configured `default_compression` if none is given + explicitly. + This can be overridden by explicitly giving a `compression` argument. + Providing `"none"` as the argument will instruct the client to not apply + any compression to this upload, which is useful for uncompressible formats. + """ + headers = self._make_headers() + body = BytesIO(contents) if isinstance(contents, bytes) else contents + original_body: IO[bytes] = body + + compression = compression or self._default_compression + if compression == "zstd": + cctx = zstandard.ZstdCompressor() + body = cctx.stream_reader(original_body) + headers["Content-Encoding"] = "zstd" + + if expiration_policy: + headers[HEADER_EXPIRATION] = format_expiration(expiration_policy) + + if metadata: + for k, v in metadata.items(): + headers[f"{HEADER_META_PREFIX}{k}"] = v + + with measure_storage_operation( + self._metrics_backend, "put", self._usecase + ) as metric_emitter: + response = self._pool.request( + "PUT", + self._make_url(id), + body=body, + headers=headers, + preload_content=True, + decode_content=True, + ) + raise_for_status(response) + res = response.json() + + # Must do this after streaming `body` as that's what is responsible + # for advancing the seek position in both streams + metric_emitter.record_uncompressed_size(original_body.tell()) + if compression and compression != "none": + metric_emitter.record_compressed_size(body.tell(), compression) + return res["key"] + + def get(self, id: str, decompress: bool = True) -> GetResult: + """ + This fetches the blob with the given `id`, returning an `IO` stream that + can be read. + + By default, content that was uploaded compressed will be automatically + decompressed, unless `decompress=False` is passed. 
+ """ + + headers = self._make_headers() + with measure_storage_operation(self._metrics_backend, "get", self._usecase): + response = self._pool.request( + "GET", + self._make_url(id), + preload_content=False, + decode_content=False, + headers=headers, + ) + raise_for_status(response) + # OR: should I use `response.stream()`? + stream = cast(IO[bytes], response) + metadata = Metadata.from_headers(response.headers) + + if metadata.compression and decompress: + if metadata.compression != "zstd": + raise NotImplementedError( + "Transparent decoding of anything but `zstd` is not implemented yet" + ) + + metadata.compression = None + dctx = zstandard.ZstdDecompressor() + stream = dctx.stream_reader(stream, read_across_frames=True) + + return GetResult(metadata, stream) + + def object_url(self, id: str) -> str: + """ + Generates a GET url to the object with the given `id`. + + This can then be used by downstream services to fetch the given object. + NOTE however that the service does not strictly follow HTTP semantics, + in particular in relation to `Accept-Encoding`. + """ + return self._make_url(id, full=True) + + def delete(self, id: str) -> None: + """ + Deletes the blob with the given `id`. 
+ """ + + headers = self._make_headers() + with measure_storage_operation(self._metrics_backend, "delete", self._usecase): + response = self._pool.request( + "DELETE", + self._make_url(id), + headers=headers, + ) + raise_for_status(response) + + +class ClientError(Exception): + def __init__(self, message: str, status: int, response: str): + super().__init__(message) + self.status = status + self.response = response + + +def raise_for_status(response: urllib3.BaseHTTPResponse) -> None: + if response.status >= 400: + res = str(response.data or response.read()) + raise ClientError( + f"Objectstore request failed with status {response.status}", + response.status, + res, + ) diff --git a/python-objectstore-client/src/objectstore_client/metadata.py b/python-objectstore-client/src/objectstore_client/metadata.py new file mode 100644 index 00000000..f7488959 --- /dev/null +++ b/python-objectstore-client/src/objectstore_client/metadata.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import itertools +import re +from collections.abc import Mapping +from dataclasses import dataclass +from datetime import timedelta +from typing import Literal, cast + +Compression = Literal["zstd"] + +HEADER_EXPIRATION = "x-sn-expiration" +HEADER_META_PREFIX = "x-snme-" + + +@dataclass +class TimeToIdle: + delta: timedelta + + +@dataclass +class TimeToLive: + delta: timedelta + + +ExpirationPolicy = TimeToIdle | TimeToLive + + +@dataclass +class Metadata: + compression: Compression | None + expiration_policy: ExpirationPolicy | None + custom: dict[str, str] + + @classmethod + def from_headers(cls, headers: Mapping[str, str]) -> Metadata: + compression = None + expiration_policy = None + custom_metadata = {} + for k, v in headers.items(): + if k == "content-encoding": + compression = cast(Compression | None, v) + elif k == HEADER_EXPIRATION: + expiration_policy = parse_expiration(v) + elif k.startswith(HEADER_META_PREFIX): + custom_metadata[k[len(HEADER_META_PREFIX) :]] = v + return 
Metadata(compression, expiration_policy, custom_metadata) + + +def format_expiration(expiration_policy: ExpirationPolicy) -> str: + if isinstance(expiration_policy, TimeToIdle): + return f"tti:{format_timedelta(expiration_policy.delta)}" + elif isinstance(expiration_policy, TimeToLive): + return f"ttl:{format_timedelta(expiration_policy.delta)}" + + +def parse_expiration(value: str) -> ExpirationPolicy | None: + if value.startswith("tti:"): + return TimeToIdle(parse_timedelta(value[4:])) + elif value.startswith("ttl:"): + return TimeToLive(parse_timedelta(value[4:])) + + return None + + +def format_timedelta(delta: timedelta) -> str: + days = delta.days + output = f"{days} days" if days else "" + if seconds := delta.seconds: + if output: + output += " " + output += f"{seconds} seconds" + + return output + + +TIME_SPLIT = re.compile(r"[^\W\d_]+|\d+") + + +def parse_timedelta(delta: str) -> timedelta: + words = TIME_SPLIT.findall(delta) + seconds = 0 + + for num, unit in itertools.batched(words, n=2, strict=True): + num = int(num) + multiplier = 0 + + if unit.startswith("w"): + multiplier = 86400 * 7 + elif unit.startswith("d"): + multiplier = 86400 + elif unit.startswith("h"): + multiplier = 3600 + elif unit.startswith("m") and not unit.startswith("ms"): + multiplier = 60 + elif unit.startswith("s"): + multiplier = 1 + + seconds += num * multiplier + + return timedelta(seconds=seconds) diff --git a/python-objectstore-client/src/objectstore_client/metrics.py b/python-objectstore-client/src/objectstore_client/metrics.py new file mode 100644 index 00000000..801875ca --- /dev/null +++ b/python-objectstore-client/src/objectstore_client/metrics.py @@ -0,0 +1,182 @@ +from __future__ import annotations + +import time +from abc import abstractmethod +from collections.abc import Generator, Mapping +from contextlib import contextmanager +from typing import Protocol, runtime_checkable + +Tags = Mapping[str, str] + + +@runtime_checkable +class MetricsBackend(Protocol): + """ + 
An abstract class that defines the interface for metrics backends. + """ + + @abstractmethod + def increment( + self, + name: str, + value: int | float = 1, + tags: Tags | None = None, + ) -> None: + """ + Increments a counter metric by a given value. + """ + raise NotImplementedError + + @abstractmethod + def gauge(self, name: str, value: int | float, tags: Tags | None = None) -> None: + """ + Sets a gauge metric to the given value. + """ + raise NotImplementedError + + @abstractmethod + def distribution( + self, + name: str, + value: int | float, + tags: Tags | None = None, + unit: str | None = None, + ) -> None: + """ + Records a distribution metric. + """ + raise NotImplementedError + + +class NoOpMetricsBackend(MetricsBackend): + """ + Default metrics backend that does not record anything. + """ + + def increment( + self, + name: str, + value: int | float = 1, + tags: Tags | None = None, + ) -> None: + pass + + def gauge(self, name: str, value: int | float, tags: Tags | None = None) -> None: + pass + + def distribution( + self, + name: str, + value: int | float, + tags: Tags | None = None, + unit: str | None = None, + ) -> None: + pass + + +class StorageMetricEmitter: + def __init__(self, backend: MetricsBackend, operation: str, usecase: str): + self.backend = backend + self.operation = operation + self.usecase = usecase + + # These may be set during or after the enclosed operation + self.start: int | None = None + self.elapsed: float | None = None + self.uncompressed_size: int | None = None + self.compressed_size: int | None = None + self.compression: str = "unknown" + + def record_latency(self, elapsed: float) -> None: + tags = {"usecase": self.usecase} + self.backend.distribution( + f"storage.{self.operation}.latency", elapsed, tags=tags + ) + self.elapsed = elapsed + + def record_uncompressed_size(self, value: int) -> None: + tags = {"usecase": self.usecase, "compression": "none"} + self.backend.distribution( + f"storage.{self.operation}.size", value, 
tags=tags, unit="byte" + ) + self.uncompressed_size = value + + def record_compressed_size(self, value: int, compression: str = "unknown") -> None: + tags = {"usecase": self.usecase, "compression": compression} + self.backend.distribution( + f"storage.{self.operation}.size", value, tags=tags, unit="byte" + ) + self.compressed_size = value + self.compression = compression + + def maybe_record_compression_ratio(self) -> None: + if not self.uncompressed_size or not self.compressed_size: + return None + + tags = {"usecase": self.usecase, "compression": self.compression} + self.backend.distribution( + f"storage.{self.operation}.compression_ratio", + self.compressed_size / self.uncompressed_size, + tags=tags, + ) + + def maybe_record_throughputs(self) -> None: + if not self.elapsed or self.elapsed <= 0: + return None + + sizes = [] + if self.uncompressed_size: + sizes.append((self.uncompressed_size, "none")) + if self.compressed_size: + sizes.append((self.compressed_size, self.compression)) + + for size, compression in sizes: + tags = {"usecase": self.usecase, "compression": compression} + self.backend.distribution( + f"storage.{self.operation}.throughput", size / self.elapsed, tags=tags + ) + self.backend.distribution( + f"storage.{self.operation}.inverse_throughput", + self.elapsed / size, + tags=tags, + ) + + +@contextmanager +def measure_storage_operation( + backend: MetricsBackend, + operation: str, + usecase: str, + uncompressed_size: int | None = None, + compressed_size: int | None = None, + compression: str = "unknown", +) -> Generator[StorageMetricEmitter]: + """ + Context manager which records the latency of the enclosed storage operation. + Can also record the compressed or uncompressed size of an object, the + compression ratio, the throughput, and the inverse throughput. + + Yields a `StorageMetricEmitter` because for some operations (GET) the size + is not known until the inside of the enclosed block. 
+ """ + emitter = StorageMetricEmitter(backend, operation, usecase) + + if uncompressed_size: + emitter.record_uncompressed_size(uncompressed_size) + if compressed_size: + emitter.record_compressed_size(compressed_size, compression) + + start = time.monotonic() + + # Yield an emitter in case the size becomes known inside the enclosed block + try: + yield emitter + + finally: + elapsed = time.monotonic() - start + emitter.record_latency(elapsed) + + # If `uncompressed_size` and/or `compressed_size` have been set, we have + # extra metrics we can send. + emitter.maybe_record_compression_ratio() + emitter.maybe_record_throughputs() diff --git a/python-objectstore-client/tests/test_smoke.py b/python-objectstore-client/tests/test_smoke.py index 07122242..154515bc 100644 --- a/python-objectstore-client/tests/test_smoke.py +++ b/python-objectstore-client/tests/test_smoke.py @@ -1,4 +1,3 @@ def test_imports() -> None: - import objectstore_client - - _ = objectstore_client + import objectstore_client # noqa: F401 + from objectstore_client import client, metadata, metrics # noqa: F401 diff --git a/uv.lock b/uv.lock index efdd1dc1..abb8e256 100644 --- a/uv.lock +++ b/uv.lock @@ -160,6 +160,18 @@ dev = [ name = "objectstore-client" version = "0.1.0" source = { editable = "python-objectstore-client" } +dependencies = [ + { name = "sentry-sdk" }, + { name = "urllib3" }, + { name = "zstandard" }, +] + +[package.metadata] +requires-dist = [ + { name = "sentry-sdk", specifier = ">=2.42.1" }, + { name = "urllib3", specifier = ">=2.5.0" }, + { name = "zstandard", specifier = ">=0.18.0" }, +] [[package]] name = "packaging" @@ -366,3 +378,43 @@ sdist = { url = "https://files.pythonhosted.org/packages/20/28/e6f1a6f655d620846 wheels = [ { url = "https://files.pythonhosted.org/packages/79/0c/c05523fa3181fdf0c9c52a6ba91a23fbf3246cc095f26f6516f9c60e6771/virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b", size = 6005095, 
upload-time = "2025-10-29T06:57:37.598Z" }, ] + +[[package]] +name = "zstandard" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/0b/8df9c4ad06af91d39e94fa96cc010a24ac4ef1378d3efab9223cc8593d40/zstandard-0.25.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94", size = 795735, upload-time = "2025-09-14T22:17:26.042Z" }, + { url = "https://files.pythonhosted.org/packages/3f/06/9ae96a3e5dcfd119377ba33d4c42a7d89da1efabd5cb3e366b156c45ff4d/zstandard-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1", size = 640440, upload-time = "2025-09-14T22:17:27.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/14/933d27204c2bd404229c69f445862454dcc101cd69ef8c6068f15aaec12c/zstandard-0.25.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f", size = 5343070, upload-time = "2025-09-14T22:17:28.896Z" }, + { url = "https://files.pythonhosted.org/packages/6d/db/ddb11011826ed7db9d0e485d13df79b58586bfdec56e5c84a928a9a78c1c/zstandard-0.25.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea", size = 5063001, upload-time = "2025-09-14T22:17:31.044Z" }, + { url = "https://files.pythonhosted.org/packages/db/00/87466ea3f99599d02a5238498b87bf84a6348290c19571051839ca943777/zstandard-0.25.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e", size = 5394120, upload-time = "2025-09-14T22:17:32.711Z" }, + { url = "https://files.pythonhosted.org/packages/2b/95/fc5531d9c618a679a20ff6c29e2b3ef1d1f4ad66c5e161ae6ff847d102a9/zstandard-0.25.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551", size = 5451230, upload-time = "2025-09-14T22:17:34.41Z" }, + { url = "https://files.pythonhosted.org/packages/63/4b/e3678b4e776db00f9f7b2fe58e547e8928ef32727d7a1ff01dea010f3f13/zstandard-0.25.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a", size = 5547173, upload-time = "2025-09-14T22:17:36.084Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d5/ba05ed95c6b8ec30bd468dfeab20589f2cf709b5c940483e31d991f2ca58/zstandard-0.25.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611", size = 5046736, upload-time = "2025-09-14T22:17:37.891Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/870aa06b3a76c73eced65c044b92286a3c4e00554005ff51962deef28e28/zstandard-0.25.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3", size = 5576368, upload-time = "2025-09-14T22:17:40.206Z" }, + { url = "https://files.pythonhosted.org/packages/5d/35/398dc2ffc89d304d59bc12f0fdd931b4ce455bddf7038a0a67733a25f550/zstandard-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b", size = 4954022, upload-time = "2025-09-14T22:17:41.879Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5c/36ba1e5507d56d2213202ec2b05e8541734af5f2ce378c5d1ceaf4d88dc4/zstandard-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851", size = 5267889, upload-time = "2025-09-14T22:17:43.577Z" }, + { url = "https://files.pythonhosted.org/packages/70/e8/2ec6b6fb7358b2ec0113ae202647ca7c0e9d15b61c005ae5225ad0995df5/zstandard-0.25.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250", size = 5433952, upload-time = "2025-09-14T22:17:45.271Z" }, + { url = "https://files.pythonhosted.org/packages/7b/01/b5f4d4dbc59ef193e870495c6f1275f5b2928e01ff5a81fecb22a06e22fb/zstandard-0.25.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98", size = 5814054, upload-time = "2025-09-14T22:17:47.08Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/fbd822d5c6f427cf158316d012c5a12f233473c2f9c5fe5ab1ae5d21f3d8/zstandard-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf", size = 5360113, upload-time = "2025-09-14T22:17:48.893Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/69a553d2047f9a2c7347caa225bb3a63b6d7704ad74610cb7823baa08ed7/zstandard-0.25.0-cp313-cp313-win32.whl", hash = "sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09", size = 436936, upload-time = "2025-09-14T22:17:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/d9/82/b9c06c870f3bd8767c201f1edbdf9e8dc34be5b0fbc5682c4f80fe948475/zstandard-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5", size = 506232, upload-time = "2025-09-14T22:17:50.402Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/60c3c01243bb81d381c9916e2a6d9e149ab8627c0c7d7abb2d73384b3c0c/zstandard-0.25.0-cp313-cp313-win_arm64.whl", hash = "sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049", size = 462671, upload-time = "2025-09-14T22:17:51.533Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3", size = 795887, upload-time = "2025-09-14T22:17:54.198Z" }, + { url = "https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f", size = 640658, upload-time = "2025-09-14T22:17:55.423Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8b6b71b1ddd517f68ffb55e10834388d4f793c49c6b83effaaa05785b0b4/zstandard-0.25.0-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c", size = 5379849, upload-time = "2025-09-14T22:17:57.372Z" }, + { url = "https://files.pythonhosted.org/packages/a4/86/a48e56320d0a17189ab7a42645387334fba2200e904ee47fc5a26c1fd8ca/zstandard-0.25.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439", size = 5058095, upload-time = "2025-09-14T22:17:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ad/eb659984ee2c0a779f9d06dbfe45e2dc39d99ff40a319895df2d3d9a48e5/zstandard-0.25.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043", size = 5551751, upload-time = "2025-09-14T22:18:01.618Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/b637faea43677eb7bd42ab204dfb7053bd5c4582bfe6b1baefa80ac0c47b/zstandard-0.25.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859", size = 
6364818, upload-time = "2025-09-14T22:18:03.769Z" }, + { url = "https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0", size = 5560402, upload-time = "2025-09-14T22:18:05.954Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ae/56523ae9c142f0c08efd5e868a6da613ae76614eca1305259c3bf6a0ed43/zstandard-0.25.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7", size = 4955108, upload-time = "2025-09-14T22:18:07.68Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/c899f2d6df0840d5e384cf4c4121458c72802e8bda19691f3b16619f51e9/zstandard-0.25.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2", size = 5269248, upload-time = "2025-09-14T22:18:09.753Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c0/59e912a531d91e1c192d3085fc0f6fb2852753c301a812d856d857ea03c6/zstandard-0.25.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344", size = 5430330, upload-time = "2025-09-14T22:18:11.966Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/7e31db1240de2df22a58e2ea9a93fc6e38cc29353e660c0272b6735d6669/zstandard-0.25.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c", size = 5811123, upload-time = "2025-09-14T22:18:13.907Z" }, + { url = "https://files.pythonhosted.org/packages/f6/49/fac46df5ad353d50535e118d6983069df68ca5908d4d65b8c466150a4ff1/zstandard-0.25.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088", size = 5359591, upload-time = "2025-09-14T22:18:16.465Z" }, + { url 
= "https://files.pythonhosted.org/packages/c2/38/f249a2050ad1eea0bb364046153942e34abba95dd5520af199aed86fbb49/zstandard-0.25.0-cp314-cp314-win32.whl", hash = "sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12", size = 444513, upload-time = "2025-09-14T22:18:20.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/43/241f9615bcf8ba8903b3f0432da069e857fc4fd1783bd26183db53c4804b/zstandard-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2", size = 516118, upload-time = "2025-09-14T22:18:17.849Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ef/da163ce2450ed4febf6467d77ccb4cd52c4c30ab45624bad26ca0a27260c/zstandard-0.25.0-cp314-cp314-win_arm64.whl", hash = "sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d", size = 476940, upload-time = "2025-09-14T22:18:19.088Z" }, +]