diff --git a/.flake8 b/.flake8
new file mode 100644
index 00000000..d5646fbb
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length = 100
+exclude = .git,__pycache__,docs/source/conf.py,old,build,dist,venv
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 00000000..cc9b1930
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,61 @@
+language: python
+python:
+  - '3.6'
+cache: pip
+install:
+  - pip install -r requirements.txt
+  - pip install -r requirements-dev.txt
+before_script:
+  - "flake8 ."
+jobs:
+  include:
+    #- stage: test
+    - stage: deploy
+      if: branch IN (develop, beta, stable, master)
+      script:
+        - VERSION=$(BRANCH=$TRAVIS_BRANCH bash ./scripts/calculate_version.sh)
+        - echo "Version $VERSION"
+        - bash ./scripts/build.sh $VERSION $TRAVIS_BRANCH
+        - export OS=`uname -s`-`uname -m`
+        - export EXECUTABLE_NAME=skale-$VERSION-$OS
+      before_deploy:
+        # Set up git user name and tag this commit
+        - (
+          test ! $TRAVIS_TAG &&
+          git config --local user.name "skale-travis" &&
+          git config --local user.email "$GITHUB_EMAIL" &&
+          export TRAVIS_TAG=$VERSION &&
+          git tag "$TRAVIS_TAG" &&
+          git push https://$GITHUB_OAUTH_TOKEN@github.com/$TRAVIS_REPO_SLUG.git $TRAVIS_TAG
+          ) || true
+      deploy:
+        - provider: releases
+          api_key: "$GITHUB_OAUTH_TOKEN"
+          skip_cleanup: true
+          file:
+            - dist/$EXECUTABLE_NAME
+          on:
+            repo: skalenetwork/skaled
+            branch: stable
+        - provider: releases
+          api_key: "$GITHUB_OAUTH_TOKEN"
+          skip_cleanup: true
+          prerelease: true
+          file:
+            - dist/$EXECUTABLE_NAME
+          on:
+            repo: $TRAVIS_REPO_SLUG
+            branch:
+              - master
+              - develop
+              - beta
+        - provider: script
+          skip_cleanup: true
+          script: bash $TRAVIS_BUILD_DIR/scripts/upload_to_do.sh
+          on:
+            repo: $TRAVIS_REPO_SLUG
+            branch:
+              - master
+              - stable
+              - develop
+              - beta
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 74e7e36a..7a9a3582 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,6 +11,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Exception handler now logs error stacktrace
 - `--endpoint` option for `node init` and `node update` commands
 - `skale info` command with information about current build
+- `skale logs container` command that fetches logs from one of the node containers
+- `skale logs dump` command that dumps all logs from the connected node
+
 
 ### Changed
 
diff --git a/README.md b/README.md
index 367987f7..a6794b24 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,8 @@
 # SKALE node CLI
 
+[![Build Status](https://travis-ci.com/skalenetwork/skale-node-cli.svg?token=tLesVRTSHvWZxoyqXdoA&branch=develop)](https://travis-ci.com/skalenetwork/skale-node-cli)
+[![Discord](https://img.shields.io/discord/534485763354787851.svg)](https://discord.gg/vvUtWJB)
+
 SKALE Node CLI, part of the SKALE suite of validator tools, is the command line to setup, register and maintain your SKALE node.
 
 ## Table of Contents
@@ -370,6 +373,31 @@ Options:
 
 - `--debug` - show debug logs; more detailed output
 
+##### Container Logs (from 0.2.1)
+
+Fetch logs from one of the node containers:
+
+```bash
+skale logs container [NAME]
+```
+
+Optional arguments:
+
+- `--lines`, `-l` - Output specified number of lines at the end of logs
+
+
+##### Dump Logs (from 0.2.2)
+
+Dump all logs from the connected node:
+
+```bash
+skale logs dump [PATH]
+```
+
+Optional arguments:
+
+- `--container`, `-c` - Dump logs only from specified container
+
 
 ### Validator commands (not implemented yet)
 
@@ -385,23 +413,37 @@ skale validator list
 
 ## Development
 
-Requirements:
-- PyInstaller 3.5+
+### Setup repo
 
-Create release:
+##### Install development dependencies
 
 ```bash
-bash build.sh patch/minor/major/keep
+pip install -r requirements-dev.txt
 ```
 
-Build executable:
+##### Add flake8 git hook
+
+In file `.git/hooks/pre-commit` add:
 
 ```bash
-pyinstaller --onefile main.spec
+#!/bin/sh
+flake8 .
 ```
+
+### Debugging
+
 Run commands in dev mode:
 
 ```bash
 ENV=dev python main.py YOUR_COMMAND
 ```
+
+### Setting up Travis
+
+Required environment variables:
+
+- `ACCESS_KEY_ID` - DO Spaces/AWS S3 API Key ID
+- `SECRET_ACCESS_KEY` - DO Spaces/AWS S3 Secret access key
+- `GITHUB_EMAIL` - Email of GitHub user
+- `GITHUB_OAUTH_TOKEN` - GitHub auth token
+
diff --git a/cli/__init__.py b/cli/__init__.py
index 67b62d34..fb3f3d03 100644
--- a/cli/__init__.py
+++ b/cli/__init__.py
@@ -1,4 +1,4 @@
-__version__ = '0.2.0'
+__version__ = '0.3.0'
 
 if __name__ == "__main__":
     print(__version__)
diff --git a/cli/containers.py b/cli/containers.py
index 8c228bd2..d1b87f7c 100644
--- a/cli/containers.py
+++ b/cli/containers.py
@@ -28,5 +28,6 @@ def schains(all):
 @login_required
 def ls(all):
     containers_list = get('skale_containers', {'all': all})
-    if not containers_list: return
+    if not containers_list:
+        return
     print_containers(containers_list)
diff --git a/cli/logs.py b/cli/logs.py
index f437438a..661c7e72 100644
--- a/cli/logs.py
+++ b/cli/logs.py
@@ -1,9 +1,11 @@
 import click
 
-from core.helper import login_required, get, download_log_file, local_only
+from core.helper import (login_required, get, download_log_file,
+                         local_only, download_dump)
 from core.print_formatters import print_logs
 from configs.cli_logger import LOG_FILEPATH, DEBUG_LOG_FILEPATH
 
+
 @click.group()
 def logs_cli():
     pass
@@ -43,3 +45,35 @@ def cli(debug):
     filepath = DEBUG_LOG_FILEPATH if debug else LOG_FILEPATH
     with open(filepath, 'r') as fin:
         print(fin.read())
+
+
+@logs.command(help="Download log file from container on the connected node")
+@click.argument('name')
+@click.option(
+    '--lines',
+    '-l',
+    help='Output specified number of lines at the end of logs',
+    default=None
+)
+@login_required
+def container(name, lines):
+    params = {'container_name': name}
+    if lines:
+        params['lines'] = lines
+    container_logs = get('container_logs', params)
+    print(container_logs)
+
+
+@logs.command(help="Dump all logs from the connected node")
+@click.option(
+    '--container',
+    '-c',
+    help='Dump logs only from specified container',
+    default=None
+)
+@click.argument('path')
+@login_required
+def dump(container, path):
+    res = download_dump(path, container)
+    if res:
+        print(f'File {res} downloaded')
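The two new `skale logs` subcommands above are thin wrappers around node API routes registered later in this patch in `core/config.py` (`container_logs` → `/container-logs`, `logs_dump` → `/logs/dump`). A rough sketch of the equivalent raw calls — host, port and container name are placeholders, and a real request also needs the session cookie that `login_required` ensures:

```python
import requests

# Placeholders: in the CLI these come from get_node_creds(config).
node_host = 'http://127.0.0.1:3007'
cookies = {}  # session cookies obtained after `skale user login`

# Roughly what `skale logs container skale_schain_test -l 100` asks the node for:
resp = requests.get(f'{node_host}/container-logs',
                    params={'container_name': 'skale_schain_test', 'lines': 100},
                    cookies=cookies)
print(resp.text)

# Roughly what `skale logs dump .` asks for; download_dump() streams this to a local file:
dump = requests.get(f'{node_host}/logs/dump', cookies=cookies, stream=True)
```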
diff --git a/cli/metrics.py b/cli/metrics.py
new file mode 100644
index 00000000..b004d9cf
--- /dev/null
+++ b/cli/metrics.py
@@ -0,0 +1,35 @@
+import click
+from core.helper import login_required, get
+from core.print_formatters import print_metrics
+
+
+@click.group()
+def metrics_cli():
+    pass
+
+
+@metrics_cli.group('metrics', help="Node metrics commands")
+def metrics():
+    pass
+
+
+@metrics.command(help="List of bounties and metrics for the first year")
+@login_required
+def first():
+    print('Please wait - collecting metrics from blockchain...')
+    bounty_list = get('first-bounties')
+    if not bounty_list:
+        print('No bounties found')
+        return
+    print_metrics(bounty_list)
+
+
+@metrics.command(help="List of bounties and metrics for the last year")
+@login_required
+def last():
+    print('Please wait - collecting metrics from blockchain...')
+    bounty_list = get('last-bounties')
+    if not bounty_list:
+        print('No bounties found')
+        return
+    print_metrics(bounty_list)
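The `first` and `last` commands above hand the API response straight to `print_metrics` (added to `core/print_formatters.py` below), which renders a Date/Bounty/Downtime/Latency table from `metrics['bounties']`. An illustration of the assumed payload shape — the concrete values here are invented:

```python
# Assumed response shape for 'first-bounties' / 'last-bounties'; real values
# come from the SKALE Manager contracts and will differ.
bounty_list = {
    'bounties': [
        # [date, bounty, downtime, latency] — one row per reward period
        ['2019-10-01 10:12:05', '35.5', '0', '1.2'],
        ['2019-11-01 10:14:31', '35.4', '2', '1.7'],
    ]
}

# print_metrics() from core/print_formatters.py (added later in this patch)
# renders these rows under the Date / Bounty / Downtime / Latency headers.
from core.print_formatters import print_metrics
print_metrics(bounty_list)
```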
diff --git a/cli/node.py b/cli/node.py
index 16edb229..d7374cd9 100644
--- a/cli/node.py
+++ b/cli/node.py
@@ -1,12 +1,16 @@
+import ipaddress
+from urllib.parse import urlparse
+
 import click
 from readsettings import ReadSettings
 from skale.utils.random_names.generator import generate_random_node_name
 
 from core.core import get_node_info, get_node_about
-from core.node import create_node, init, purge, deregister, update
+from core.node import create_node, init, purge, update
 from core.host import install_host_dependencies
-from core.helper import abort_if_false, local_only, login_required, safe_load_texts
+from core.helper import (abort_if_false, local_only,
+                         login_required, safe_load_texts)
 from core.config import CONFIG_FILEPATH, DEFAULT_RPC_IP, DEFAULT_RPC_PORT, \
     DEFAULT_DB_USER, DEFAULT_DB_PORT, DEFAULT_MTA_ENDPOINT, DEFAULT_ENDPOINT
 from configs.node import DEFAULT_NODE_BASE_PORT
@@ -15,6 +19,30 @@
 TEXTS = safe_load_texts()
 
 
+class UrlType(click.ParamType):
+    name = 'url'
+
+    def convert(self, value, param, ctx):
+        try:
+            result = urlparse(value)
+        except ValueError:
+            self.fail(f'Some characters are not allowed in {value}',
+                      param, ctx)
+        if not all([result.scheme, result.netloc]):
+            self.fail(f'Expected valid url. Got {value}', param, ctx)
+
+
+class IpType(click.ParamType):
+    name = 'ip'
+
+    def convert(self, value, param, ctx):
+        try:
+            ipaddress.ip_address(value)
+        except ValueError:
+            self.fail(f'expected valid ipv4/ipv6 address. Got {value}',
+                      param, ctx)
+
+
 @click.group()
 def node_cli():
     pass
@@ -42,7 +70,7 @@ def node_about(format):
 @node.command('register', help="Register current node in the SKALE Manager")
 @click.option(
     '--name', '-n',
-    #prompt="Enter node name",
+    # prompt="Enter node name",
     default=generate_random_node_name(),
     help='SKALE node name'
 )
@@ -59,12 +87,14 @@ def node_about(format):
 @click.option(
     '--ip',
     prompt="Enter node public IP",
+    type=IpType(),
     help='Public IP for RPC connections & consensus (required)'
 )
 @click.option(
     '--port', '-p',
     default=DEFAULT_NODE_BASE_PORT,
-    #prompt="Enter node base port",
+    type=int,
+    # prompt="Enter node base port",
     help='Base port for node sChains'
 )
 @login_required
@@ -77,6 +107,7 @@ def register_node(name, ip, port):
 @click.option('--install-deps', is_flag=True)
 @click.option( # todo: tmp option - after stable release branch
     '--mta-endpoint',
+    type=UrlType(),
     # prompt="Enter Git branch to clone",
     help='MTA endpoint to connect',
     default=DEFAULT_MTA_ENDPOINT
@@ -103,20 +134,25 @@ def register_node(name, ip, port):
 )
 @click.option( # todo: tmp option - remove after mainnet deploy
     '--endpoint',
+    type=UrlType(),
     # prompt="Enter Mainnet RPC port",
-    help='RPC endpoint of the node in the network where SKALE manager is deployed',
+    help='RPC endpoint of the node in the network '
+         'where SKALE manager is deployed',
     default=DEFAULT_ENDPOINT
 )
 @click.option( # todo: tmp option - remove after mainnet deploy
     '--rpc-ip',
+    type=IpType(),
     # prompt="Enter Mainnet RPC IP",
     help='IP of the node in the network where SKALE manager is deployed',
     default=DEFAULT_RPC_IP
 )
 @click.option( # todo: tmp option - remove after mainnet deploy
     '--rpc-port',
+    type=int,
     # prompt="Enter Mainnet RPC port",
-    help='WS RPC port of the node in the network where SKALE manager is deployed',
+    help='WS RPC port of the node in the network '
+         'where SKALE manager is deployed',
     default=DEFAULT_RPC_PORT
 )
 @click.option(
@@ -137,28 +173,33 @@ def register_node(name, ip, port):
 )
 @click.option(
     '--db-port',
+    type=int,
     help='Port for of node internal database',
     default=DEFAULT_DB_PORT
 )
 @click.option(
     '--disk-mountpoint',
     prompt="Enter data disk mount point",
-    help='Mount point of the disk to be used for storing sChains data (required)'
+    help='Mount point of the disk to be used '
+         'for storing sChains data (required)'
 )
 @click.option(
     '--test-mode',
     is_flag=True
 )
 @local_only
-def init_node(mta_endpoint, install_deps, stream, github_token, docker_username, docker_password, endpoint, rpc_ip,
-              rpc_port, db_user, db_password, db_root_password, db_port, disk_mountpoint, test_mode):
+def init_node(mta_endpoint, install_deps, stream, github_token,
+              docker_username, docker_password, endpoint, rpc_ip,
+              rpc_port, db_user, db_password, db_root_password, db_port,
+              disk_mountpoint, test_mode):
     if install_deps:
         install_host_dependencies()
     if not db_root_password:
         db_root_password = db_password
     git_branch = stream
-    init(mta_endpoint, git_branch, github_token, docker_username, docker_password, endpoint, rpc_ip, rpc_port, db_user,
+    init(mta_endpoint, git_branch, github_token, docker_username,
+         docker_password, endpoint, rpc_ip, rpc_port, db_user,
         db_password, db_root_password, db_port, disk_mountpoint, test_mode)
 
 
@@ -174,7 +215,8 @@ def purge_node():
 # @node.command('deregister', help="De-register node from the SKALE Manager")
 # @click.option('--yes', is_flag=True, callback=abort_if_false,
 #               expose_value=False,
-#               prompt='Are you sure you want to de-register this node from SKALE Manager?')
+#               prompt='Are you sure you want to de-register '
+#                      'this node from SKALE Manager?')
 # @local_only
 # def deregister_node():
 #     deregister()
@@ -186,6 +228,7 @@ def purge_node():
               prompt='Are you sure you want to update SKALE node software?')
 @click.option( # todo: tmp option - after stable release branch
     '--mta-endpoint',
+    type=UrlType(),
     # prompt="Enter Git branch to clone",
     help='MTA endpoint to connect',
     default=DEFAULT_MTA_ENDPOINT
@@ -207,20 +250,25 @@ def purge_node():
 )
 @click.option( # todo: tmp option - remove after mainnet deploy
     '--endpoint',
+    type=UrlType(),
     # prompt="Enter Mainnet RPC port",
-    help='RPC endpoint of the node in the network where SKALE manager is deployed',
+    help='RPC endpoint of the node in the network '
+         'where SKALE manager is deployed',
     default=DEFAULT_ENDPOINT
 )
 @click.option( # todo: tmp option - remove after mainnet deploy
     '--rpc-ip',
+    type=IpType(),
     # prompt="Enter Mainnet RPC IP",
     help='IP of the node in the network where SKALE manager is deployed',
     default=DEFAULT_RPC_IP
 )
 @click.option( # todo: tmp option - remove after mainnet deploy
     '--rpc-port',
+    type=int,
     # prompt="Enter Mainnet RPC port",
-    help='WS RPC port of the node in the network where SKALE manager is deployed',
+    help='WS RPC port of the node in the network '
+         'where SKALE manager is deployed',
     default=DEFAULT_RPC_PORT
 )
 @click.option(
@@ -241,11 +289,16 @@ def purge_node():
 )
 @click.option(
     '--db-port',
+    type=int,
     help='Port for of node internal database',
     default=DEFAULT_DB_PORT
 )
 @local_only
-def update_node(mta_endpoint, github_token, docker_username, docker_password, endpoint, rpc_ip, rpc_port, db_user, db_password, db_root_password, db_port):
+def update_node(mta_endpoint, github_token, docker_username, docker_password,
+                endpoint, rpc_ip, rpc_port,
+                db_user, db_password, db_root_password, db_port):
     if not db_root_password:
         db_root_password = db_password
-    update(mta_endpoint, github_token, docker_username, docker_password, endpoint, rpc_ip, rpc_port, db_user, db_password, db_root_password, db_port)
+    update(mta_endpoint, github_token, docker_username, docker_password,
+           endpoint, rpc_ip, rpc_port,
+           db_user, db_password, db_root_password, db_port)
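`UrlType` and `IpType` above hook into click's parameter type system: click calls `convert()` on the raw string and `self.fail()` raises a usage error. Note that `ParamType.convert` is also expected to return the validated value; a standalone sketch of the same idea (not the exact classes from this patch) that does return it:

```python
import ipaddress
import click


class IpAddressType(click.ParamType):
    """Hypothetical standalone variant of the IpType param type above."""
    name = 'ip'

    def convert(self, value, param, ctx):
        try:
            ipaddress.ip_address(value)
        except ValueError:
            self.fail(f'expected valid ipv4/ipv6 address. Got {value}', param, ctx)
        return value  # click uses the return value as the option's value


@click.command()
@click.option('--ip', type=IpAddressType(), required=True)
def ping(ip):
    print(f'node ip: {ip}')


if __name__ == '__main__':
    ping()
```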
diff --git a/configs/cli_logger.py b/configs/cli_logger.py
index 659465eb..c9cb5c41 100644
--- a/configs/cli_logger.py
+++ b/configs/cli_logger.py
@@ -1,5 +1,5 @@
 import os
-from configs.node import NODE_DATA_PATH
+from configs.node import HOME_DIR
 
 LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
 
@@ -7,10 +7,9 @@
 LOG_FILE_SIZE_BYTES = LOG_FILE_SIZE_MB * 1000000
 LOG_BACKUP_COUNT = 1
 
-
-LOG_DATA_PATH = os.path.join(NODE_DATA_PATH, 'log')
+LOG_DATA_PATH = os.path.join(HOME_DIR, '.skale-cli-log')
 
 LOG_FILENAME = 'node-cli.log'
 DEBUG_LOG_FILENAME = 'debug-node-cli.log'
 
 LOG_FILEPATH = os.path.join(LOG_DATA_PATH, LOG_FILENAME)
-DEBUG_LOG_FILEPATH = os.path.join(LOG_DATA_PATH, DEBUG_LOG_FILENAME)
\ No newline at end of file
+DEBUG_LOG_FILEPATH = os.path.join(LOG_DATA_PATH, DEBUG_LOG_FILENAME)
diff --git a/configs/node.py b/configs/node.py
index c8bddadc..370821fe 100644
--- a/configs/node.py
+++ b/configs/node.py
@@ -1,9 +1,11 @@
 import os
+from pathlib import Path
 
 SKALE_VOLUME_PATH = '/skale_vol'
 NODE_DATA_PATH = '/skale_node_data'
+HOME_DIR = str(Path.home())
 
 LOCAL_WALLET_FILENAME = 'local_wallet.json'
 LOCAL_WALLET_FILEPATH = os.path.join(NODE_DATA_PATH, LOCAL_WALLET_FILENAME)
 
-DEFAULT_NODE_BASE_PORT = 10000
\ No newline at end of file
+DEFAULT_NODE_BASE_PORT = 10000
diff --git a/configs/resource_allocation.py b/configs/resource_allocation.py
index 08ded040..d6c18f22 100644
--- a/configs/resource_allocation.py
+++ b/configs/resource_allocation.py
@@ -17,10 +17,9 @@
 DISK_MOUNTPOINT_FILENAME = 'disk_mountpoint.txt'
 DISK_MOUNTPOINT_FILEPATH = os.path.join(NODE_DATA_PATH, DISK_MOUNTPOINT_FILENAME)
 
-#CONVOY_HELPER_SCRIPT_URL = 'https://raw.githubusercontent.com/rancher/convoy/master/tools/dm_dev_partition.sh'
 CONVOY_HELPER_SCRIPT_FILENAME = 'dm_dev_partition.sh'
 CONVOY_HELPER_SCRIPT_FILEPATH = os.path.join(THIRDPARTY_FOLDER_PATH, CONVOY_HELPER_SCRIPT_FILENAME)
 
 CONVOY_SERVICE_TEMPLATE_FILENAME = 'convoy.service.j2'
 CONVOY_SERVICE_TEMPLATE_PATH = os.path.join(DATAFILES_FOLDER, CONVOY_SERVICE_TEMPLATE_FILENAME)
-CONVOY_SERVICE_PATH = '/etc/systemd/system/convoy.service'
\ No newline at end of file
+CONVOY_SERVICE_PATH = '/etc/systemd/system/convoy.service'
diff --git a/core/config.py b/core/config.py
index e5c86c9f..5f803b9b 100644
--- a/core/config.py
+++ b/core/config.py
@@ -12,13 +12,11 @@
 CURRENT_FILE_LOCATION = os.path.dirname(os.path.realpath(__file__))
 
-
 if ENV == 'dev':
     PARDIR = os.path.join(CURRENT_FILE_LOCATION, os.pardir)
 else:
     PARDIR = os.path.join(sys._MEIPASS, 'data')
 
-
 TEXT_FILE = os.path.join(PARDIR, 'text.yml')
 DATAFILES_FOLDER = os.path.join(PARDIR, 'datafiles')
 
@@ -48,7 +46,11 @@
     'skale_containers': '/containers/list',
 
     'logs': '/logs',
-    'log_download': '/download-log-file'
+    'logs_dump': '/logs/dump',
+    'container_logs': '/container-logs',
+    'log_download': '/download-log-file',
+    'first-bounties': '/first-bounties',
+    'last-bounties': '/last-bounties'
 }
 
 LONG_LINE = '-' * 50
@@ -73,4 +75,4 @@
 DEFAULT_DB_PORT = '3306'
 
 HOST_OS = platform.system()
-MAC_OS_SYSTEM_NAME = 'Darwin'
\ No newline at end of file
+MAC_OS_SYSTEM_NAME = 'Darwin'
diff --git a/core/core.py b/core/core.py
index 731fb3f0..37c91b9d 100644
--- a/core/core.py
+++ b/core/core.py
@@ -1,7 +1,7 @@
 import inspect
 import requests
 from core.config import URLS, LONG_LINE
-from core.helper import safe_get_config, safe_load_texts, get_node_creds, construct_url, \
+from core.helper import safe_load_texts, get_node_creds, construct_url, \
     get_response_data, clean_cookies, get_request
 
 NODE_STATUSES = ['Not created', 'Requested', 'Active']
diff --git a/core/helper.py b/core/helper.py
index fff2f335..cc203a71 100644
--- a/core/helper.py
+++ b/core/helper.py
@@ -1,5 +1,7 @@
 import pickle
 import yaml
+import os
+import re
 import requests
 import shutil
 from functools import wraps
@@ -16,12 +18,14 @@
     DEBUG_LOG_FILEPATH
 
 config = ReadSettings(CONFIG_FILEPATH)
+logger = logging.getLogger(__name__)
 
 
 def safe_get_config(config, key):
     try:
         return config[key]
     except KeyError as e:
+        logger.error(e)
         # print(f'No such key in config: {key}')
         return None
 
@@ -115,7 +119,7 @@ def get_request(url, cookies=None, params=None):
     try:
         return requests.get(url, cookies=cookies, params=params)
     except requests.exceptions.ConnectionError as e:
-        # todo: log error
+        logger.error(e)
         print(f'Could not connect to {url}')
         return None
 
@@ -124,7 +128,7 @@ def post_request(url, json, cookies=None):
     try:
         return requests.post(url, json=json, cookies=cookies)
     except requests.exceptions.ConnectionError as e:
-        # todo: log error
+        logger.error(e)
         print(f'Could not connect to {url}')
         return None
 
@@ -177,6 +181,28 @@ def download_log_file(name, type, schain):
     return local_filename
 
 
+def download_dump(path, container_name=None):
+    host, cookies = get_node_creds(config)
+    url = construct_url(host, URLS['logs_dump'])
+    params = {}
+    if container_name:
+        params['container_name'] = container_name
+    with requests.get(url, params=params, cookies=cookies, stream=True) as r:
+        if r is None:
+            return None
+        if r.status_code != requests.codes.ok:
+            print('Request failed, status code:', r.status_code)
+            print_err_response(r.json())
+            return None
+        d = r.headers['Content-Disposition']
+        fname_q = re.findall("filename=(.+)", d)[0]
+        fname = fname_q.replace('"', '')
+        filepath = os.path.join(path, fname)
+        with open(filepath, 'wb') as f:
+            shutil.copyfileobj(r.raw, f)
+        return fname
+
+
 def init_default_logger():
     f_handler = get_file_handler(LOG_FILEPATH, logging.INFO)
     debug_f_handler = get_file_handler(DEBUG_LOG_FILEPATH, logging.DEBUG)
diff --git a/core/host.py b/core/host.py
index fb349551..e86d3d04 100644
--- a/core/host.py
+++ b/core/host.py
@@ -13,19 +13,21 @@
 from configs.resource_allocation import DISK_MOUNTPOINT_FILEPATH, \
     CONVOY_HELPER_SCRIPT_FILEPATH, CONVOY_SERVICE_TEMPLATE_PATH, CONVOY_SERVICE_PATH
 
-from core.helper import safe_get_config, safe_load_texts, construct_url, clean_cookies, clean_host, get_localhost_endpoint
+from core.helper import safe_get_config, safe_load_texts, construct_url, clean_cookies, \
+    clean_host, get_localhost_endpoint
-from tools.helper import run_cmd, process_template, get_username
+from tools.helper import run_cmd, process_template
 
 TEXTS = safe_load_texts()
 logger = logging.getLogger(__name__)
 
+
 def install_host_dependencies():
     env = {
         **os.environ,
         'SKALE_CMD': 'host_deps'
     }
-    res = subprocess.run(["sudo", "bash", DEPENDENCIES_SCRIPT], env=env)
+    subprocess.run(["sudo", "bash", DEPENDENCIES_SCRIPT], env=env)
     # todo: check execution status
 
@@ -76,6 +78,7 @@ def prepare_host(test_mode, disk_mountpoint):
     if not test_mode:
         init_convoy(disk_mountpoint)
 
+
 def init_convoy(disk_mountpoint):
     print(f'Installing convoy...')
     run_cmd(['bash', INSTALL_CONVOY_SCRIPT], shell=False)
@@ -86,8 +89,9 @@ def start_convoy_daemon(disk_mountpoint):
     template_data = {
-        #'user': get_username(),
-        'cmd': f'/usr/local/bin/convoy daemon --drivers devicemapper --driver-opts dm.datadev={disk_mountpoint}1 --driver-opts dm.metadatadev={disk_mountpoint}2'
+        # 'user': get_username(),
+        'cmd': f'/usr/local/bin/convoy daemon --drivers devicemapper --driver-opts \
+               dm.datadev={disk_mountpoint}1 --driver-opts dm.metadatadev={disk_mountpoint}2'
     }
     msg = f'Starting convoy daemon, template data: {template_data}'
     logger.info(msg), print(msg)
@@ -109,10 +113,17 @@ def save_disk_mountpoint(disk_mountpoint):
         f.write(disk_mountpoint)
 
 
+def init_logs_dir():
+    safe_mk_dirs(LOG_DATA_PATH)
+
+
 def init_data_dir():
-    if os.path.exists(LOG_DATA_PATH):
+    safe_mk_dirs(NODE_DATA_PATH)
+
+
+def safe_mk_dirs(path):
+    if os.path.exists(path):
         return
-    msg = f'Creating {NODE_DATA_PATH} directory...'
+    msg = f'Creating {path} directory...'
     logger.info(msg), print(msg)
-    os.makedirs(NODE_DATA_PATH, exist_ok=True)
-    os.makedirs(LOG_DATA_PATH, exist_ok=True)
+    os.makedirs(path, exist_ok=True)
diff --git a/core/node.py b/core/node.py
index 487db0b6..72d9125f 100644
--- a/core/node.py
+++ b/core/node.py
@@ -5,8 +5,7 @@
 from core.config import INSTALL_SCRIPT, UNINSTALL_SCRIPT, UPDATE_SCRIPT, UPDATE_NODE_PROJECT_SCRIPT
 from core.config import URLS
 from core.helper import get_node_creds, construct_url, post_request, print_err_response
-from core.host import prepare_host
-from core.resources import check_is_partition
+from core.host import prepare_host, init_data_dir
 
 logger = logging.getLogger(__name__)
@@ -22,9 +21,9 @@ def create_node(config, name, p2p_ip, public_ip, port):
     }
     url = construct_url(host, URLS['create_node'])
 
-    try: # todo: tmp fix!
+    try:  # todo: tmp fix!
         response = post_request(url, data, cookies)
-    except:
+    except Exception:
         response = post_request(url, data, cookies)
 
     if response is None:
@@ -38,7 +37,8 @@ def create_node(config, name, p2p_ip, public_ip, port):
     print_err_response(response.json())
 
 
-def init(mta_endpoint, git_branch, github_token, docker_username, docker_password, endpoint, rpc_ip, rpc_port,
+def init(mta_endpoint, git_branch, github_token, docker_username, docker_password, endpoint, rpc_ip,
+         rpc_port,
          db_user, db_password, db_root_password, db_port, disk_mountpoint, test_mode):
 
     env = {
@@ -58,9 +58,11 @@ def init(mta_endpoint, git_branch, github_token, docker_username, docker_password, endpoint, rpc_ip,
         'DISK_MOUNTPOINT': disk_mountpoint
     }
 
-    #if check_is_partition(disk_mountpoint):
+    # if check_is_partition(disk_mountpoint):
     #     raise Exception("You provided partition path instead of disk mountpoint.")
 
+    init_data_dir()
+
     prepare_host(test_mode, disk_mountpoint)
     res = subprocess.run(['bash', INSTALL_SCRIPT], env=env)
     logging.info(f'Node init install script result: {res.stderr}, {res.stdout}')
@@ -69,7 +71,7 @@ def init(mta_endpoint, git_branch, github_token, docker_username, docker_password, endpoint, rpc_ip,
 
 def purge():
     # todo: check that node is installed
-    res = subprocess.run(['sudo', 'bash', UNINSTALL_SCRIPT])
+    subprocess.run(['sudo', 'bash', UNINSTALL_SCRIPT])
     # todo: check execution result
 
@@ -77,9 +79,8 @@ def purge():
 def deregister():
     pass
 
 
-def update(mta_endpoint, github_token, docker_username, docker_password, endpoint, rpc_ip, rpc_port, db_user,
-           db_password,
-           db_root_password, db_port):
+def update(mta_endpoint, github_token, docker_username, docker_password, endpoint, rpc_ip, rpc_port,
+           db_user, db_password, db_root_password, db_port):
     env = {
         **os.environ,
         'MTA_ENDPOINT': mta_endpoint,
@@ -98,7 +99,8 @@ def update(mta_endpoint, github_token, docker_username, docker_password, endpoint, rpc_ip, rpc_port,
     }
     res_update_project = subprocess.run(['sudo', '-E', 'bash', UPDATE_NODE_PROJECT_SCRIPT], env=env)
     logging.info(
-        f'Update node project script result: {res_update_project.stderr}, {res_update_project.stdout}')
+        f'Update node project script result: {res_update_project.stderr}, \
+        {res_update_project.stdout}')
     res_update_node = subprocess.run(['sudo', '-E', 'bash', UPDATE_SCRIPT], env=env)
     logging.info(
         f'Update node script result: {res_update_node.stderr}, {res_update_node.stdout}')
diff --git a/core/print_formatters.py b/core/print_formatters.py
index b2f9ecd8..f9499c9f 100644
--- a/core/print_formatters.py
+++ b/core/print_formatters.py
@@ -77,6 +77,17 @@ def print_schains(schains):
     print(Formatter().table(headers, rows))
 
 
+def print_metrics(metrics):
+    headers = [
+        'Date',
+        'Bounty',
+        'Downtime',
+        'Latency'
+    ]
+    rows = metrics['bounties']
+    print(Formatter().table(headers, rows))
+
+
 def print_logs(logs):
     print('Base logs\n')
     print_log_list(logs['base'])
diff --git a/core/resources.py b/core/resources.py
index 057412c5..c70dfe89 100644
--- a/core/resources.py
+++ b/core/resources.py
@@ -7,7 +7,8 @@
 from tools.schain_types import SchainTypes
 from tools.helper import write_json, read_json, run_cmd, format_output
 from configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH, TIMES, TIMEOUT, \
-    TINY_DIVIDER, SMALL_DIVIDER, MEDIUM_DIVIDER, MEMORY_FACTOR, DISK_FACTOR, DISK_MOUNTPOINT_FILEPATH
+    TINY_DIVIDER, SMALL_DIVIDER, MEDIUM_DIVIDER, MEMORY_FACTOR, DISK_FACTOR, \
+    DISK_MOUNTPOINT_FILEPATH
 
 logger = logging.getLogger(__name__)
@@ -79,7 +80,7 @@ def get_disk_alloc(disk_path):
         disk_size = get_disk_size(disk_path)
     except subprocess.CalledProcessError:
         raise Exception("Couldn't get disk size, check disk mountpoint option.")
-    #if check_is_partition(disk_path):
+    # if check_is_partition(disk_path):
     #     raise Exception("You provided partition path instead of disk mountpoint.")
     free_space = disk_size * DISK_FACTOR
     return ResourceAlloc(free_space)
@@ -94,18 +95,21 @@ def get_disk_size(disk_path):
 
 def construct_disk_size_cmd(disk_path):
     return f'sudo blockdev --getsize64 {disk_path}'
-    # return f'fdisk -l {disk_path} | sed -n \'1p\' | grep -oP \', \K[^,]+\' | sed -n \'1p\'' # alternate version
+
 
 def check_is_partition(disk_path):
     res = run_cmd(['blkid', disk_path])
     output = str(res.stdout)
-    if 'PARTUUID' in output: return True
+    if 'PARTUUID' in output:
+        return True
     return False
 
+
 def get_allocation_option_name(schain):
     part_of_node = int(schain['partOfNode'])
     return SchainTypes(part_of_node).name
 
+
 def get_disk_path():
     f = open(DISK_MOUNTPOINT_FILEPATH, "r")
-    return f.read()
\ No newline at end of file
+    return f.read()
diff --git a/core/user.py b/core/user.py
index e7df3ec2..5f268296 100644
--- a/core/user.py
+++ b/core/user.py
@@ -35,7 +35,7 @@ def register_user(config, username, password, token):
 def login_user(config, username, password):
     host = safe_get_config(config, 'host')
     if not host:
-        return
+        host = get_localhost_endpoint()
 
     data = {
         'username': username,
@@ -77,7 +77,7 @@ def show_registration_token(short):
             print(config["token"])
         else:
             print(f'User registration token: {config["token"]}')
-    except FileNotFoundError as e:
+    except FileNotFoundError:
         err_msg = "Couldn't find registration tokens file. Check that node inited on this machine."
         logger.error(err_msg)
         print(err_msg)
diff --git a/core/validators.py b/core/validators.py
index 6cfa6b26..2845047d 100644
--- a/core/validators.py
+++ b/core/validators.py
@@ -1,6 +1,5 @@
-import requests
-from core.config import URLS, LONG_LINE
-from core.helper import get_node_creds, construct_url, get_request, print_err_response
+from core.config import URLS
+from core.helper import get_node_creds, construct_url, get_request
 
 
 def get_validators_info(config, format):
@@ -16,6 +15,3 @@ def get_validators_info(config, format):
 
     if format == 'json':
         print(data)
-    else:
-        pass
-        #print_wallet_info(data)
diff --git a/main.py b/main.py
index 8a75f031..0ebf8111 100644
--- a/main.py
+++ b/main.py
@@ -10,12 +10,16 @@
 from cli.containers import containers_cli
 from cli.logs import logs_cli
 from cli.node import node_cli
+from cli.metrics import metrics_cli
 
-from core.helper import login_required, safe_load_texts, local_only, no_node, init_default_logger
+from core.helper import (login_required, safe_load_texts, local_only,
+                         no_node, init_default_logger)
 from core.config import CONFIG_FILEPATH, LONG_LINE
 from core.wallet import get_wallet_info, set_wallet_by_pk
-from core.user import register_user, login_user, logout_user, show_registration_token
-from core.host import test_host, show_host, fix_url, reset_host, init_data_dir
+from core.user import (register_user, login_user, logout_user,
+                       show_registration_token)
+from core.host import (test_host, show_host, fix_url, reset_host,
+                       init_logs_dir)
 
 config = ReadSettings(CONFIG_FILEPATH)
 TEXTS = safe_load_texts()
@@ -56,7 +60,8 @@ def info():
 @click.option('--skip-check', is_flag=True)
 def attach(host, skip_check):
     host = fix_url(host)
-    if not host: return
+    if not host:
+        return
     if test_host(host) or skip_check:
         config['host'] = host
         logging.info(f'Attached to {host}')
@@ -79,7 +84,9 @@ def user():
     pass
 
 
-@user.command('token', help="Show registration token if avaliable. Server-only command.")
+@user.command('token',
+              help="Show registration token if available. "
+                   "Server-only command.")
@click.option('--short', is_flag=True)
 @local_only
 def user_token(short):
@@ -159,17 +166,23 @@ def handle_exception(exc_type, exc_value, exc_traceback):
     if issubclass(exc_type, KeyboardInterrupt):
         sys.__excepthook__(exc_type, exc_value, exc_traceback)
         return
-    logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))
+    logger.error("Uncaught exception",
+                 exc_info=(exc_type, exc_value, exc_traceback))
 
 
 sys.excepthook = handle_exception
 
 if __name__ == '__main__':
-    init_data_dir()
+    init_logs_dir()
     init_default_logger()
     args = sys.argv
-    logger.info(f'cmd: {" ".join(str(x) for x in args)}, v.{__version__}') # todo: hide secret variables (passwords, private keys)
+    # todo: hide secret variables (passwords, private keys)
+    logger.info(f'cmd: {" ".join(str(x) for x in args)}, v.{__version__}')
     cmd_collection = click.CommandCollection(
-        sources=[cli, schains_cli, containers_cli, logs_cli, node_cli])
-    cmd_collection()
+        sources=[cli, schains_cli, containers_cli, logs_cli,
+                 node_cli, metrics_cli])
+    try:
+        cmd_collection()
+    except Exception as err:
+        print(f'Command execution failed with {err}. Recheck your inputs')
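The entry point above merges the per-module groups (`cli`, `schains_cli`, `containers_cli`, `logs_cli`, `node_cli`, and the new `metrics_cli`) into one CLI via `click.CommandCollection`. A minimal self-contained example of the same pattern, with toy groups standing in for the real ones:

```python
import click


@click.group()
def base_cli():
    pass


@base_cli.command()
def version():
    print('0.3.0')


@click.group()
def metrics_cli():
    pass


@metrics_cli.command()
def metrics():
    print('collecting metrics...')


# CommandCollection exposes commands from all listed groups under a single CLI.
cmd_collection = click.CommandCollection(sources=[base_cli, metrics_cli])

if __name__ == '__main__':
    cmd_collection()
```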
diff --git a/requirements-dev.txt b/requirements-dev.txt
index d494411b..36bedd3c 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,3 +1,4 @@
 PyInstaller==3.5
-bumpversion==0.5.3
-
+boto3==1.9.233
+flake8==3.7.8
+bumpversion==0.5.3
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 1ec54eda..bcd40651 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,3 +8,5 @@ Jinja2==2.10.1
 skale-py==0.82.0
 eth-hash==0.2.0
 pycryptodome==3.8.2
+psutil==5.6.3
+
diff --git a/scripts/build.sh b/scripts/build.sh
new file mode 100644
index 00000000..87dae043
--- /dev/null
+++ b/scripts/build.sh
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+
+set -e
+
+VERSION=$1
+BRANCH=$2
+
+USAGE_MSG='Usage: build.sh [VERSION] [BRANCH]'
+
+if [ -z "$1" ]
+then
+    (>&2 echo 'You should provide version')
+    echo $USAGE_MSG
+    exit 1
+fi
+
+if [ -z "$2" ]
+then
+    (>&2 echo 'You should provide git branch')
+    echo $USAGE_MSG
+    exit 1
+fi
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+PARENT_DIR="$(dirname "$DIR")"
+
+OS=`uname -s`-`uname -m`
+#CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
+LATEST_COMMIT=$(git rev-parse HEAD)
+CURRENT_DATETIME="`date "+%Y-%m-%d %H:%M:%S"`";
+DIST_INFO_FILEPATH=$PARENT_DIR/cli/info.py
+
+touch $DIST_INFO_FILEPATH
+
+echo "BUILD_DATETIME = '$CURRENT_DATETIME'" > $DIST_INFO_FILEPATH
+echo "COMMIT = '$LATEST_COMMIT'" >> $DIST_INFO_FILEPATH
+echo "BRANCH = '$BRANCH'" >> $DIST_INFO_FILEPATH
+echo "OS = '$OS'" >> $DIST_INFO_FILEPATH
+echo "VERSION = '$VERSION'" >> $DIST_INFO_FILEPATH
+
+EXECUTABLE_NAME=skale-$VERSION-$OS
+
+pyinstaller --onefile main.spec --hidden-import=eth_hash.backends.pysha3
+
+mv $PARENT_DIR/dist/main $PARENT_DIR/dist/$EXECUTABLE_NAME
+
+echo "========================================================================================="
+echo "Built node-cli v$VERSION, branch: $BRANCH"
+echo "Executable: $EXECUTABLE_NAME"
diff --git a/scripts/calculate_version.sh b/scripts/calculate_version.sh
new file mode 100644
index 00000000..c66b7abf
--- /dev/null
+++ b/scripts/calculate_version.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+VERSION=$(python setup.py --version)
+
+
+if [ -z $VERSION ]; then
+    echo "The base version is not set."
+    exit 1
+fi
+
+if [[ $BRANCH == 'stable' ]]; then
+    echo $VERSION
+    exit 1
+fi
+
+git fetch --tags
+
+for (( NUMBER=0; ; NUMBER++ ))
+do
+    FULL_VERSION="$VERSION-$BRANCH.$NUMBER"
+    if ! [ $(git tag -l ?$FULL_VERSION) ]; then
+        echo "$FULL_VERSION" | tr / -
+        break
+    fi
+done
\ No newline at end of file
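To make the tagging loop above concrete: the script appends `-<branch>.<N>` to the base version reported by `setup.py` and emits the first suffix that is not already a git tag (slashes in branch names are replaced with dashes). A Python rendering of the same logic with assumed tag data:

```python
# Illustration only: mirrors the bash loop in calculate_version.sh.
base_version = '0.3.0'          # from `python setup.py --version`
branch = 'develop'
existing_tags = {'0.3.0-develop.0', '0.3.0-develop.1'}  # assumed `git tag` output

number = 0
while f'{base_version}-{branch}.{number}' in existing_tags:
    number += 1

full_version = f'{base_version}-{branch}.{number}'.replace('/', '-')
print(full_version)  # -> 0.3.0-develop.2
```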
diff --git a/scripts/upload_to_do.py b/scripts/upload_to_do.py
new file mode 100644
index 00000000..7f5cef13
--- /dev/null
+++ b/scripts/upload_to_do.py
@@ -0,0 +1,42 @@
+import os
+import sys
+import boto3
+
+session = boto3.session.Session()
+LONG_LINE = '=' * 50
+
+
+def print_info(endpoint_url, filename, space, key):
+    print('Going to upload file...')
+    print(f'''\
+{LONG_LINE}
+Endpoint: {endpoint_url}
+Filename: {filename}
+Space: {space}
+Key: {key}
+{LONG_LINE}
+''')
+
+
+def upload_file(access_key_id, secret_access_key, filename, space, key, region='sfo2',
+                endpoint_url=None):
+    if not endpoint_url:
+        endpoint_url = f'https://{region}.digitaloceanspaces.com'
+    print_info(endpoint_url, filename, space, key)
+    client = session.client('s3',
+                            region_name=region,
+                            endpoint_url=endpoint_url,
+                            aws_access_key_id=access_key_id,
+                            aws_secret_access_key=secret_access_key)
+    client.upload_file(filename, space, key)
+
+
+if __name__ == "__main__":
+    ACCESS_KEY_ID = os.environ['ACCESS_KEY_ID']
+    SECRET_ACCESS_KEY = os.environ['SECRET_ACCESS_KEY']
+
+    FILEPATH = sys.argv[1]
+    SPACE_NAME = sys.argv[2]
+    KEY = sys.argv[3]
+
+    upload_file(ACCESS_KEY_ID, SECRET_ACCESS_KEY, FILEPATH, SPACE_NAME, KEY)
diff --git a/scripts/upload_to_do.sh b/scripts/upload_to_do.sh
new file mode 100644
index 00000000..641be117
--- /dev/null
+++ b/scripts/upload_to_do.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+python $TRAVIS_BUILD_DIR/scripts/upload_to_do.py $TRAVIS_BUILD_DIR/dist/$EXECUTABLE_NAME skale-cli $TRAVIS_BRANCH/$EXECUTABLE_NAME
\ No newline at end of file
diff --git a/setup.py b/setup.py
index fa09a49a..45575584 100644
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,8 @@ def read(*parts):
 def find_version(*file_paths):
     version_file = read(*file_paths)
     version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
-    if version_match: return version_match.group(1)
+    if version_match:
+        return version_match.group(1)
 
     raise RuntimeError("Couldn't parse version from file.")
diff --git a/tools/helper.py b/tools/helper.py
index 12b8100e..bca3c2d6 100644
--- a/tools/helper.py
+++ b/tools/helper.py
@@ -57,5 +57,6 @@ def read_file(path):
     file.close()
     return text
 
+
 def get_username():
-    return os.environ.get('USERNAME') or os.environ.get('USER')
\ No newline at end of file
+    return os.environ.get('USERNAME') or os.environ.get('USER')
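Finally, a hedged usage sketch for `scripts/upload_to_do.py` above. In CI it is driven by `upload_to_do.sh` with the `ACCESS_KEY_ID`/`SECRET_ACCESS_KEY` environment variables, but the function can also be called directly; every value below is illustrative:

```python
# Hypothetical direct use of scripts/upload_to_do.py (run from the scripts/ directory).
from upload_to_do import upload_file

upload_file(
    access_key_id='DO00EXAMPLEKEYID',                      # normally the ACCESS_KEY_ID env var
    secret_access_key='example-secret',                    # normally SECRET_ACCESS_KEY
    filename='dist/skale-0.3.0-develop.0-Linux-x86_64',    # artifact produced by build.sh
    space='skale-cli',                                     # Space used by upload_to_do.sh
    key='develop/skale-0.3.0-develop.0-Linux-x86_64',      # $TRAVIS_BRANCH/$EXECUTABLE_NAME
)
```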