diff --git a/Dockerfile b/Dockerfile index 67a6a42d..4b4bb58e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:18.04 +FROM ubuntu:20.04 ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get install -y software-properties-common diff --git a/lvmpy b/lvmpy index 8ee051bf..8ef6f707 160000 --- a/lvmpy +++ b/lvmpy @@ -1 +1 @@ -Subproject commit 8ee051bf24aa3feecc0ef97fb5eec970eb068512 +Subproject commit 8ef6f7070f73c1ab1aeb1405d90ededcb9ee4bd4 diff --git a/node_cli/cli/__init__.py b/node_cli/cli/__init__.py index 18f56790..c01e9839 100644 --- a/node_cli/cli/__init__.py +++ b/node_cli/cli/__init__.py @@ -1,4 +1,4 @@ -__version__ = '2.3.0' +__version__ = '2.3.1' if __name__ == "__main__": print(__version__) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index eec6c59a..e407f8d2 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -108,10 +108,11 @@ def init_node(env_file): @click.option('--yes', is_flag=True, callback=abort_if_false, expose_value=False, prompt='Are you sure you want to update SKALE node software?') +@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) @click.argument('env_file') @streamed_cmd -def update_node(env_file): - update(env_file) +def update_node(env_file, pull_config_for_schain): + update(env_file, pull_config_for_schain) @node.command('signature', help='Get node signature for given validator id') @@ -137,9 +138,15 @@ def backup_node(backup_folder_path): is_flag=True, hidden=True ) +@click.option( + '--config-only', + help='Only restore configuration files in .skale and artifacts', + is_flag=True, + hidden=True +) @streamed_cmd -def restore_node(backup_path, env_file, no_snapshot): - restore(backup_path, env_file, no_snapshot) +def restore_node(backup_path, env_file, no_snapshot, config_only): + restore(backup_path, env_file, no_snapshot, config_only) @node.command('maintenance-on', help="Set SKALE node into maintenance mode") diff --git a/node_cli/cli/schains.py 
b/node_cli/cli/schains.py index cd4a7e00..06e289a6 100644 --- a/node_cli/cli/schains.py +++ b/node_cli/cli/schains.py @@ -25,6 +25,8 @@ from node_cli.core.schains import ( describe, get_schain_firewall_rules, + get_schains_by_artifacts, + restore_schain_from_snapshot, show_config, show_dkg_info, show_schains, @@ -43,8 +45,17 @@ def schains() -> None: @schains.command(help="List of sChains served by connected node") -def ls() -> None: - show_schains() +@click.option( + '-n', '--names', + help='Shows only chain names', + is_flag=True +) +def ls(names: bool) -> None: + if names: + schains: str = get_schains_by_artifacts() + print(schains) + else: + show_schains() @schains.command(help="DKG statuses for each sChain on the node") @@ -95,3 +106,17 @@ def repair(schain_name: str, snapshot_from: Optional[str] = None) -> None: ) def info_(schain_name: str, json_format: bool) -> None: describe(schain_name, raw=json_format) + + +@schains.command('restore', help='Restore schain from local snapshot') +@click.argument('schain_name') +@click.argument('snapshot_path') +@click.option('--schain-type', default='medium') +@click.option('--env-type', default=None) +def restore( + schain_name: str, + snapshot_path: str, + schain_type: str, + env_type: Optional[str] +) -> None: + restore_schain_from_snapshot(schain_name, snapshot_path, env_type, schain_type) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index c8f71d35..579cbe58 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -41,6 +41,8 @@ SKALE_TMP_DIR = os.path.join(SKALE_DIR, '.tmp') NODE_DATA_PATH = os.path.join(SKALE_DIR, 'node_data') +SCHAIN_NODE_DATA_PATH = os.path.join(NODE_DATA_PATH, 'schains') +NODE_CONFIG_PATH = os.path.join(NODE_DATA_PATH, 'node_config.json') CONTAINER_CONFIG_PATH = os.path.join(SKALE_DIR, 'config') CONTAINER_CONFIG_TMP_PATH = os.path.join(SKALE_TMP_DIR, 'config') CONTRACTS_PATH = os.path.join(SKALE_DIR, 'contracts_info') @@ -52,7 +54,8 @@ SGX_CERTIFICATES_DIR_NAME = 
'sgx_certs' COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') -STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') +STATIC_PARAMS_FILEPATH = os.path.join( + CONTAINER_CONFIG_PATH, 'static_params.yaml') NGINX_TEMPLATE_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'nginx.conf.j2') NGINX_CONFIG_FILEPATH = os.path.join(NODE_DATA_PATH, 'nginx.conf') diff --git a/node_cli/core/node.py b/node_cli/core/node.py index b8aa637d..ce369bc8 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -143,7 +143,7 @@ def init(env_filepath): @check_not_inited -def restore(backup_path, env_filepath, no_snapshot=False): +def restore(backup_path, env_filepath, no_snapshot=False, config_only=False): env = get_node_env(env_filepath) if env is None: return @@ -154,7 +154,7 @@ def restore(backup_path, env_filepath, no_snapshot=False): logger.info('Adding BACKUP_RUN to env ...') env['BACKUP_RUN'] = 'True' # should be str - restored_ok = restore_op(env, backup_path) + restored_ok = restore_op(env, backup_path, config_only=config_only) if not restored_ok: error_exit( 'Restore operation failed', @@ -167,7 +167,12 @@ def restore(backup_path, env_filepath, no_snapshot=False): print('Node is restored from backup') -def get_node_env(env_filepath, inited_node=False, sync_schains=None): +def get_node_env( + env_filepath, + inited_node=False, + sync_schains=None, + pull_config_for_schain=None +): if env_filepath is not None: env_params = extract_env_params(env_filepath) if env_params is None: @@ -186,15 +191,22 @@ def get_node_env(env_filepath, inited_node=False, sync_schains=None): env['FLASK_SECRET_KEY'] = flask_secret_key if sync_schains: env['BACKUP_RUN'] = 'True' + if pull_config_for_schain: + env['PULL_CONFIG_FOR_SCHAIN'] = pull_config_for_schain return {k: v for k, v in env.items() if v != ''} @check_inited @check_user -def update(env_filepath): +def update(env_filepath, pull_config_for_schain): logger.info('Node update started') 
configure_firewall_rules() - env = get_node_env(env_filepath, inited_node=True, sync_schains=False) + env = get_node_env( + env_filepath, + inited_node=True, + sync_schains=False, + pull_config_for_schain=pull_config_for_schain + ) update_ok = update_op(env_filepath, env) if update_ok: logger.info('Waiting for containers initialization') @@ -345,26 +357,30 @@ def is_base_containers_alive(): return len(skale_containers) >= BASE_CONTAINERS_AMOUNT -def get_node_info(format): +def get_node_info_plain(): status, payload = get_request( blueprint=BLUEPRINT_NAME, method='info' ) if status == 'ok': - node_info = payload['node_info'] - if format == 'json': - print(node_info) - elif node_info['status'] == NodeStatuses.NOT_CREATED.value: - print(TEXTS['service']['node_not_registered']) - else: - print_node_info( - node_info, - get_node_status(int(node_info['status'])) - ) + return payload['node_info'] else: error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) +def get_node_info(format): + node_info = get_node_info_plain() + if format == 'json': + print(node_info) + elif node_info['status'] == NodeStatuses.NOT_CREATED.value: + print(TEXTS['service']['node_not_registered']) + else: + print_node_info( + node_info, + get_node_status(int(node_info['status'])) + ) + + def get_node_status(status): node_status = NodeStatuses(status).name return TEXTS['node']['status'][node_status] diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 84a558ad..f9fa64aa 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -1,9 +1,24 @@ import logging +import os import pprint +import shutil +from pathlib import Path -from typing import Optional +from typing import Dict, Optional -from node_cli.utils.helper import get_request, post_request, error_exit +from node_cli.configs import ( + ALLOCATION_FILEPATH, + NODE_CONFIG_PATH, + SCHAIN_NODE_DATA_PATH +) +from node_cli.configs.env import get_env_config + +from node_cli.utils.helper import ( + get_request, + 
error_exit, + safe_load_yml, + post_request +) from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.print_formatters import ( print_dkg_statuses, @@ -11,6 +26,9 @@ print_schain_info, print_schains ) +from node_cli.utils.docker_utils import ensure_volume, is_volume_exists +from node_cli.utils.helper import read_json, run_cmd +from lvmpy.src.core import mount, volume_mountpoint logger = logging.getLogger(__name__) @@ -40,7 +58,8 @@ def show_schains() -> None: if not schains: print('No sChains found') return - print_schains(schains) + else: + print_schains(schains) else: error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) @@ -98,3 +117,106 @@ def describe(schain: str, raw=False) -> None: print_schain_info(payload, raw=raw) else: error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +def btrfs_set_readonly_false(subvolume_path: str) -> None: + run_cmd(['btrfs', 'property', 'set', '-ts', subvolume_path, 'ro', 'false']) + + +def btrfs_receive_binary(src_path: str, binary_path: str) -> None: + run_cmd(['btrfs', 'receive', '-f', binary_path, src_path]) + + +def get_block_number_from_path(snapshot_path: str) -> int: + stem = Path(snapshot_path).stem + bn = -1 + try: + bn = int(stem.split('-')[-1]) + except ValueError: + return -1 + return bn + + +def get_node_config() -> Dict: + return read_json(NODE_CONFIG_PATH) + + +def get_node_id() -> int: + info = get_node_config() + return info['node_id'] + + +def migrate_prices_and_blocks(path: str, node_id: int) -> None: + db_suffix = '.db' + for sname in os.listdir(path): + subvolume_path = os.path.join(path, sname) + logger.debug('Processing %s', sname) + btrfs_set_readonly_false(subvolume_path) + if sname.endswith(db_suffix): + subvolume_path = os.path.join(path, sname) + dbname = sname.split('_')[0] + new_path = os.path.join(path, f'{dbname}_{node_id}{db_suffix}') + logger.debug('New path for %s %s', sname, new_path) + shutil.move(subvolume_path, new_path) + + +def make_btrfs_snapshot(src: 
str, dst: str) -> None: + run_cmd(['btrfs', 'subvolume', 'snapshot', src, dst]) + + +def fillin_snapshot_folder(src_path: str, block_number: int) -> None: + snapshots_dirname = 'snapshots' + snapshot_folder_path = os.path.join( + src_path, snapshots_dirname, str(block_number)) + os.makedirs(snapshot_folder_path, exist_ok=True) + for subvolume in os.listdir(src_path): + if subvolume != snapshots_dirname: + logger.debug('Copying %s to %s', subvolume, snapshot_folder_path) + subvolume_path = os.path.join(src_path, subvolume) + subvolume_snapshot_path = os.path.join( + snapshot_folder_path, subvolume) + make_btrfs_snapshot(subvolume_path, subvolume_snapshot_path) + + +def restore_schain_from_snapshot( + schain: str, + snapshot_path: str, + env_type: Optional[str] = None, + schain_type: str = 'medium' +) -> None: + if env_type is None: + env_config = get_env_config() + env_type = env_config['ENV_TYPE'] + ensure_schain_volume(schain, schain_type, env_type) + block_number = get_block_number_from_path(snapshot_path) + if block_number == -1: + logger.error('Invalid snapshot path format') + return + node_id = get_node_id() + + mount(schain) + src_path = volume_mountpoint(schain) + logger.info('Unpacking binary') + btrfs_receive_binary(src_path, snapshot_path) + logger.info('Migrating subvolumes') + migrate_prices_and_blocks(src_path, node_id) + migrate_prices_and_blocks(src_path, node_id) + logger.info('Recreating snapshot folder') + fillin_snapshot_folder(src_path, block_number) + + +def get_schains_by_artifacts() -> str: + return '\n'.join(os.listdir(SCHAIN_NODE_DATA_PATH)) + + +def get_schain_volume_size(schain_type: str, env_type: str) -> int: + alloc = safe_load_yml(ALLOCATION_FILEPATH) + return alloc[env_type]['disk'][schain_type] + + +def ensure_schain_volume(schain: str, schain_type: str, env_type: str) -> None: + if not is_volume_exists(schain): + size = get_schain_volume_size(schain_type, env_type) + ensure_volume(schain, size) + else: + logger.warning('Volume %s 
already exists', schain) diff --git a/node_cli/main.py b/node_cli/main.py index aa1a7e54..10009022 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -40,6 +40,7 @@ from node_cli.utils.helper import safe_load_texts, init_default_logger from node_cli.configs import LONG_LINE from node_cli.core.host import init_logs_dir +from node_cli.utils.helper import error_exit TEXTS = safe_load_texts() @@ -109,8 +110,7 @@ def handle_exception(exc_type, exc_value, exc_traceback): try: cmd_collection() except Exception as err: - print(f'Command execution failed with {err}. Recheck your inputs') traceback.print_exc() - logger.exception(f'Command failed with {err}') - finally: - logger.debug(f'execution time: {time.time() - start_time} seconds') + logger.debug('Execution time: %d seconds', time.time() - start_time) + error_exit(err) + logger.debug('Execution time: %d seconds', time.time() - start_time) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 81c1dab0..a5d68547 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -194,7 +194,7 @@ def turn_on(env): compose_up(env) -def restore(env, backup_path): +def restore(env, backup_path, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -226,7 +226,8 @@ def restore(env, backup_path): disk_device=env['DISK_MOUNTPOINT'], env_type=env['ENV_TYPE'] ) - compose_up(env) + if not config_only: + compose_up(env) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 9a2911c8..1a3f11fb 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -158,16 +158,16 @@ def get_logs_backup_filepath(container: Container) -> str: return os.path.join(REMOVED_CONTAINERS_FOLDER_PATH, log_file_name) -def ensure_volume(name: str, size: int, dutils=None): +def ensure_volume(name: str, size: int, driver='lvmpy', dutils=None): dutils = 
dutils or docker_client() if is_volume_exists(name, dutils=dutils): - logger.info(f'Volume with name {name} already exits') + logger.info('Volume %s already exists', name) return - logger.info(f'Creating volume - size: {size}, name: {name}') - driver_opts = {'size': str(size)} + logger.info('Creating volume %s, size: %d', name, size) + driver_opts = {'size': str(size)} if driver == 'lvmpy' else None volume = dutils.volumes.create( name=name, - driver='lvmpy', + driver=driver, driver_opts=driver_opts ) return volume @@ -270,5 +270,4 @@ def docker_cleanup(dclient=None, ignore=None): cleanup_unused_images(dclient=dc, ignore=ignore) system_prune() except Exception as e: - logger.warning('Image cleanuping errored with %s', e) - logger.debug('Image cleanuping errored', exc_info=True) + logger.warning('Image cleanup errored with %s', e) diff --git a/setup.py b/setup.py index dc335888..ff60099a 100644 --- a/setup.py +++ b/setup.py @@ -20,14 +20,14 @@ def find_version(*file_paths): extras_require = { 'linter': [ - "flake8==5.0.4", + "flake8==6.0.0", "isort>=4.2.15,<5.10.2", ], 'dev': [ "bumpversion==0.6.0", - "pytest==7.1.2", - "pytest-cov==3.0.0", - "twine==4.0.1", + "pytest==7.2.2", + "pytest-cov==4.0.0", + "twine==4.0.2", "mock==4.0.3", "freezegun==1.2.2" ] @@ -51,24 +51,24 @@ def find_version(*file_paths): url='https://github.com/skalenetwork/node-cli', install_requires=[ "click==8.1.3", - "PyInstaller==5.6.2", + "PyInstaller==5.12.0", "distro==1.4.0", "docker==6.0.1", - "texttable==1.6.4", + "texttable==1.6.7", "python-dateutil==2.8.2", "Jinja2==3.1.2", - "psutil==5.9.1", + "psutil==5.9.4", "python-dotenv==0.21.0", "terminaltables==3.1.10", "requests==2.28.1", - "GitPython==3.1.30", - "packaging==21.3", - "python-debian==0.1.48", + "GitPython==3.1.31", + "packaging==23.0", + "python-debian==0.1.49", "python-iptables==1.0.1", "PyYAML==6.0", "MarkupSafe==2.1.1", - 'Flask==2.2.2', - 'itsdangerous==2.0.1', + 'Flask==2.3.3', + 'itsdangerous==2.1.2', 'sh==1.14.2', 
'python-crontab==2.6.0' ], diff --git a/tests/core_node_test.py b/tests/core_node_test.py index 466a90ec..2ee12036 100644 --- a/tests/core_node_test.py +++ b/tests/core_node_test.py @@ -185,4 +185,4 @@ def test_update_node(mocked_g_config, resource_file): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), \ mock.patch('node_cli.core.host.init_data_dir'): - update(env_filepath) + update(env_filepath, pull_config_for_schain=None)