Skip to content

Commit

Permalink
Implement persistent bucket fixtures for integration tests (#1046)
Browse files Browse the repository at this point in the history
* Add persistent bucket fixtures

* Refactor integration tests to use persistent bucket where applicable

* Adjust SSE-C & notification rules tests to use const bucket with subfolders

* Remove account_info_file dependency in persistent bucket creation

* Clean up

* Refactor persistent bucket cleanup in tests: manual clear, remove auto-teardown, add error handling

* Add changelog

* Add full-stops

* Format

* Retry on duplicate bucket in persistent bucket get_or_create

* Improve changelog

* Don't clean up the persistent bucket's subfolder after each test case

* Rename persistent bucket fixture

* Delete forgotten pass stmt

* Change changelog category

* Don't clean up persistent bucket on teardown

* Revert changes to cleanup_buckets.py

* Clean dead code
  • Loading branch information
kris-konina-reef authored Oct 1, 2024
1 parent 2f942e0 commit f768453
Show file tree
Hide file tree
Showing 5 changed files with 538 additions and 345 deletions.
1 change: 1 addition & 0 deletions changelog.d/+test_with_persistent_bucket.infrastructure.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Improve internal testing infrastructure by updating integration tests to use persistent buckets.
22 changes: 22 additions & 0 deletions test/integration/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import subprocess
import sys
import tempfile
import uuid
from os import environ, path
from tempfile import TemporaryDirectory

Expand All @@ -31,6 +32,10 @@

from ..helpers import b2_uri_args_v3, b2_uri_args_v4
from .helpers import NODE_DESCRIPTION, RNG_SEED, Api, CommandLine, bucket_name_part, random_token
from .persistent_bucket import (
PersistentBucketAggregate,
get_or_create_persistent_bucket,
)

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -402,3 +407,20 @@ def b2_uri_args(apiver_int):
return b2_uri_args_v4
else:
return b2_uri_args_v3


# -- Persistent bucket fixtures --
@pytest.fixture
def unique_subfolder():
    """Return a unique per-test subfolder name inside the persistent bucket.

    Tests share one long-lived bucket, so each test isolates its files
    under a random subfolder instead of creating a fresh bucket.
    """
    # The original used `yield` with no teardown after it; a plain return
    # is equivalent for pytest and avoids the needless generator fixture.
    return f"test-{uuid.uuid4().hex[:8]}"


@pytest.fixture
def persistent_bucket(unique_subfolder, b2_api) -> PersistentBucketAggregate:
    """
    Provide the shared persistent bucket paired with a per-test subfolder.

    Consumers of the `bucket_name` fixture expect a freshly created bucket;
    we mirror that isolation by scoping each test to its own unique
    subfolder within the long-lived persistent bucket.
    """
    bucket = get_or_create_persistent_bucket(b2_api)
    yield PersistentBucketAggregate(bucket.name, unique_subfolder)
5 changes: 2 additions & 3 deletions test/integration/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,6 @@ def _should_remove_bucket(self, bucket: Bucket) -> tuple[bool, str]:
def clean_buckets(self, quick=False):
# even with use_cache=True, if cache is empty API call will be made
buckets = self.api.list_buckets(use_cache=quick)
print('Total bucket count:', len(buckets))
remaining_buckets = []
for bucket in buckets:
should_remove, why = self._should_remove_bucket(bucket)
Expand Down Expand Up @@ -539,9 +538,9 @@ def reauthorize(self, check_key_capabilities=False):
} - private_preview_caps - set(auth_dict['allowed']['capabilities'])
assert not missing_capabilities, f'it appears that the raw_api integration test is being run with a non-full key. Missing capabilities: {missing_capabilities}'

def list_file_versions(self, bucket_name):
def list_file_versions(self, bucket_name, path=''):
return self.should_succeed_json(
['ls', '--json', '--recursive', '--versions', *self.b2_uri_args(bucket_name)]
['ls', '--json', '--recursive', '--versions', *self.b2_uri_args(bucket_name, path)]
)

def cleanup_buckets(self, buckets: dict[str, dict | None]) -> None:
Expand Down
63 changes: 63 additions & 0 deletions test/integration/persistent_bucket.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
######################################################################
#
# File: test/integration/persistent_bucket.py
#
# Copyright 2024 Backblaze Inc. All Rights Reserved.
#
# License https://www.backblaze.com/using_b2_code.html
#
######################################################################
import hashlib
import os
from dataclasses import dataclass
from functools import cached_property
from test.integration.helpers import BUCKET_NAME_LENGTH, Api

import backoff
from b2sdk.v2 import Bucket
from b2sdk.v2.exception import DuplicateBucketName, NonExistentBucket

PERSISTENT_BUCKET_NAME_PREFIX = "constst"


@dataclass
class PersistentBucketAggregate:
    """Pairs a persistent bucket's name with a per-test subfolder."""

    bucket_name: str
    subfolder: str

    @cached_property
    def virtual_bucket_name(self) -> str:
        """Bucket name and subfolder joined as one path-like identifier."""
        return "/".join((self.bucket_name, self.subfolder))


def get_persistent_bucket_name(b2_api: Api) -> str:
    """Derive a deterministic name for the shared persistent test bucket.

    Keyed on ``GITHUB_REPOSITORY_ID`` in CI (distinct repos/forks get
    distinct buckets), falling back to the B2 account id locally.
    Hashing keeps the name opaque and of predictable length.
    """
    bucket_base = os.environ.get("GITHUB_REPOSITORY_ID", b2_api.api.get_account_id())
    bucket_hash = hashlib.sha256(bucket_base.encode()).hexdigest()
    # Truncate so the result never exceeds the maximum allowed bucket name
    # length (original had a stray space before the slice subscript).
    return f"{PERSISTENT_BUCKET_NAME_PREFIX}-{bucket_hash}"[:BUCKET_NAME_LENGTH]


# Retry up to 3 times with exponential backoff + jitter: if another test
# runner creates the bucket between our lookup and our create_bucket call,
# the retried lookup will find it.
@backoff.on_exception(
    backoff.expo,
    DuplicateBucketName,
    max_tries=3,
    jitter=backoff.full_jitter,
)
def get_or_create_persistent_bucket(b2_api: Api) -> Bucket:
    """Fetch the persistent test bucket by its deterministic name, creating it if absent."""
    bucket_name = get_persistent_bucket_name(b2_api)
    try:
        bucket = b2_api.api.get_bucket_by_name(bucket_name)
    except NonExistentBucket:
        bucket = b2_api.api.create_bucket(
            bucket_name,
            bucket_type="allPublic",
            # Lifecycle rules keep the long-lived bucket self-cleaning:
            # files are hidden a day after upload and deleted a day later.
            lifecycle_rules=[
                {
                    "daysFromHidingToDeleting": 1,
                    "daysFromUploadingToHiding": 1,
                    "fileNamePrefix": "",
                }
            ],
        )
    # add the new bucket name to the list of bucket names
    # NOTE(review): this logs the name even when the bucket already existed —
    # presumably intentional so callers can track every bucket touched; confirm.
    b2_api.bucket_name_log.append(bucket_name)
    return bucket
Loading

0 comments on commit f768453

Please sign in to comment.