try to start running cleanup so we can test restore to flushed systems #481

Draft: wants to merge 3 commits into main
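Roughly, the new flow in runner.py is: take a hot backup while the test data is present, flush the deployment with the new clear_data_impl(), restore the backup into the emptied system, and verify the data again. A simplified sketch of that sequence; the exact backup-name handling and waits are in the run() hunks below:

    self.create_backup(self.backup_name)        # backup taken while the test data is present
    self.check_data_impl()                      # baseline: data verifies before the flush
    self.clear_data_impl()                      # new: run cleardata.js against every makedata database
    self.restore_backup(self.list_backup()[-1]) # restore the last backup into the now-empty deployment
    self.check_data_impl()                      # the restored data must verify again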
22 changes: 18 additions & 4 deletions release_tester/arangodb/sh.py
@@ -390,10 +390,20 @@ def check_test_data(
return ret

@step
def clear_test_data(self, testname, args=None, result_line_handler=default_line_result):
def clear_test_data(
self,
testname,
supports_foxx_tests,
args=None,
one_shard: bool = False,
database_name: str = "_system",
result_line_handler=default_line_result):
"""flush the testdata from the instance again"""
if args is None:
args = []
args = [database_name] + args
if one_shard:
args += ["--singleShard", "true"]
if testname:
logging.info("removing test data for {0}".format(testname))
else:
@@ -406,9 +416,13 @@ def clear_test_data(self, testname, args=None, result_line_handler=default_line_
cmd=[
"cleaning up test data",
self.cfg.test_data_dir.resolve() / "cleardata.js",
]
+ test_filter,
args=args + ["--progress", "true"],
],
args=args + [
'--progress', 'true',
'--oldVersion', self.old_version,
'--testFoxx', 'true' if supports_foxx_tests else 'false',
'--passvoid', self.cfg.passvoid
] + test_filter,
progressive_timeout=5,
result_line_handler=result_line_handler,
)
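The extended clear_test_data() now takes the Foxx switch, an optional one-shard flag and the target database, and forwards them to cleardata.js. A hypothetical call with illustrative values (test name, count offset and database name are made up; the real call site is clear_data_impl() in runner.py below):

    starter.arangosh.clear_test_data(
        "makedata_test",                   # testname, illustrative
        starter.supports_foxx_tests,       # forwarded as --testFoxx true/false
        args=["--countOffset", "100"],     # extra arguments handed to cleardata.js
        one_shard=True,                    # adds --singleShard true
        database_name="system_oneshard_makedata",
    )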
65 changes: 62 additions & 3 deletions release_tester/arangodb/starter/deployments/runner.py
@@ -352,6 +352,7 @@ def run(self):
bound = 1 if is_single_test else versions_count - 1

for i in range(0, bound):
self.old_installer = self.installers[i][1]
if i == 0:
# if i != 0, it means that self.cfg was already updated after chain-upgrade
@@ -385,6 +386,10 @@ def run(self):
self.finish_setup()
if self.create_oneshard_db:
self.custom_databases.append(["system_oneshard_makedata", True, 1])
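# take a hot backup while the deployment is still empty, before make_data() populates it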
if self.hot_backup:
self.progress(False, "TESTING empty HOTBACKUP")
self.empty_backup_name = self.create_backup("empty_" + self.name)

self.make_data()
self.after_makedata_check()
self.check_data_impl()
@@ -403,10 +408,16 @@ def run(self):
self.tcp_ping_all_nodes()
self.create_non_backup_data()
taken_backups = self.list_backup()
backup_no = len(taken_backups) - 1
self.upload_backup(taken_backups[backup_no])
work_backup = ""
for one_backup in taken_backups:
print(one_backup)
if one_backup.startswith(self.backup_name):
work_backup = one_backup
if work_backup == "":
raise Exception("backup {self.backup_name} not found in {taken_backups}")
self.upload_backup(work_backup)
self.tcp_ping_all_nodes()
self.delete_backup(taken_backups[backup_no])
self.delete_backup(work_backup)
self.tcp_ping_all_nodes()
backups = self.list_backup()
if len(backups) != len(taken_backups) - 1:
@@ -425,6 +436,15 @@ def run(self):
self.check_data_impl()
if not self.check_non_backup_data():
raise Exception("data created after backup is still there??")
self.clear_data_impl()
self.restore_backup(backups[len(backups) - 1])
self.tcp_ping_all_nodes()
self.after_backup()
time.sleep(20) # TODO fix
self.check_data_impl()
if not self.check_non_backup_data():
raise Exception("data created after backup is still there??")

if self.new_installer:
if self.hot_backup:
@@ -891,6 +911,44 @@ def check_data_impl(self):
if not frontend_found:
raise Exception("no frontend found.")

@step
def clear_data_impl(self):
"""clear the data on the installation"""
frontend_found = False
if self.has_makedata_data:
print(self.makedata_instances)
for starter in self.makedata_instances:
if not starter.is_leader:
continue
assert starter.arangosh, "check: this starter doesn't have an arangosh!"
frontend_found = True
arangosh = starter.arangosh
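# clear the makedata databases in reverse creation order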
for db_name, one_shard, count_offset in self.makedata_databases()[::-1]:
print(db_name, one_shard, count_offset)
try:
starter.arangosh.clear_test_data(
self.name,
starter.supports_foxx_tests,
args=["--countOffset", str(count_offset)],
database_name=db_name,
one_shard=one_shard,
)
except CliExecutionException as exc:
print("cleardata failed!")
print(exc)
if not self.cfg.verbose:
print(exc.execution_result[1])
self.ask_continue_or_exit(
f"check_data has failed for {self.name} in database {db_name} with {exc}",
exc.execution_result[1],
False,
exc,
)
print("Done cleanup")
return
if not frontend_found:
raise Exception("no frontend found.")

@step
def create_non_backup_data(self):
"""create data to be zapped by the restore operation"""
@@ -1040,6 +1098,7 @@ def search_for_warnings(self, print_lines=True):
@step
def zip_test_dir(self):
"""💾 store the test directory for later analysis"""
return #TODO
build_number = os.environ.get("BUILD_NUMBER")
if build_number:
build_number = "_" + build_number