diff --git a/.dockerignore b/.dockerignore
index 8ce907d8e..5b0628378 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -8,7 +8,9 @@
 **/__pycache__
 **/.npm
 *.pyc
+backups
 data
 docker/Dockerfile
+docker*.yml
 package-lock.json
 **/node_modules
diff --git a/.gitignore b/.gitignore
index 5071ad7f7..09448f7b7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,8 +19,10 @@ instance
 *.pyc
 ./certs
 **/node_modules
+backups
 data
 lifemonitor/static/dist
+lifemonitor/static/src/node_modules
 docker-compose.yml
 utils/certs/data
 tests/config/data/crates/*.zip
diff --git a/docker-compose.base.yml b/docker-compose.base.yml
index 0965a24a8..122b75d62 100644
--- a/docker-compose.base.yml
+++ b/docker-compose.base.yml
@@ -52,7 +52,7 @@ services:
     entrypoint: /bin/bash
     restart: "no"
     command: |
-      -c "wait-for-postgres.sh && flask init db"
+      -c "wait-for-postgres.sh && ./lm-admin.py db init"
     depends_on:
       - "db"
     env_file: *env_file
diff --git a/docker-compose.test.yml b/docker-compose.test.yml
index d95648482..84a688c60 100644
--- a/docker-compose.test.yml
+++ b/docker-compose.test.yml
@@ -10,7 +10,7 @@ services:
     restart: "no"
     entrypoint: /bin/bash
     command: |
-      -c "wait-for-postgres.sh && flask init db && /usr/local/bin/lm_entrypoint.sh"
+      -c "wait-for-postgres.sh && ./lm-admin.py db init && /usr/local/bin/lm_entrypoint.sh"
    environment:
      - "FLASK_ENV=testingSupport"
      - "HOME=/lm"
diff --git a/docker/lifemonitor.Dockerfile b/docker/lifemonitor.Dockerfile
index e444d0cd4..ffda678ec 100644
--- a/docker/lifemonitor.Dockerfile
+++ b/docker/lifemonitor.Dockerfile
@@ -1,9 +1,9 @@
-FROM python:3.7-buster as base
+FROM python:3.9-buster as base

 # Install base requirements
 RUN apt-get update -q \
     && apt-get install -y --no-install-recommends \
-        bash \
+        bash lftp rsync \
         redis-tools \
         postgresql-client-11 \
     && apt-get clean -y && rm -rf /var/lib/apt/lists
@@ -60,7 +60,7 @@ RUN mkdir -p /var/data/lm \
 USER lm

 # Copy lifemonitor app
-COPY --chown=lm:lm app.py gunicorn.conf.py /lm/
+COPY --chown=lm:lm app.py lm-admin.py gunicorn.conf.py /lm/
 COPY --chown=lm:lm specs /lm/specs
 COPY --chown=lm:lm lifemonitor /lm/lifemonitor
 COPY --chown=lm:lm migrations /lm/migrations
diff --git a/k8s/Chart.yaml b/k8s/Chart.yaml
index 2e2097cce..3475dc8ed 100644
--- a/k8s/Chart.yaml
+++ b/k8s/Chart.yaml
@@ -7,12 +7,12 @@ type: application
 # This is the chart version. This version number should be incremented each time you make changes
 # to the chart and its templates, including the app version.
 # Versions are expected to follow Semantic Versioning (https://semver.org/)
-version: 0.7.0
+version: 0.8.0

 # This is the version number of the application being deployed. This version number should be
 # incremented each time you make changes to the application. Versions are not expected to
 # follow Semantic Versioning. They should reflect the version the application is using.
-appVersion: 0.7.1
+appVersion: 0.7.2

 # Chart dependencies
 dependencies:
diff --git a/k8s/pvc-backend-backup.yaml b/k8s/pvc-backend-backup.yaml
new file mode 100644
index 000000000..2dc8a2b83
--- /dev/null
+++ b/k8s/pvc-backend-backup.yaml
@@ -0,0 +1,10 @@
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: data-api-backup
+spec:
+  accessModes:
+    - ReadWriteMany
+  resources:
+    requests:
+      storage: 1Gi
diff --git a/k8s/templates/_helpers.tpl b/k8s/templates/_helpers.tpl
index 1468ad3a3..7187f27c3 100644
--- a/k8s/templates/_helpers.tpl
+++ b/k8s/templates/_helpers.tpl
@@ -132,3 +132,25 @@ Define mount points shared by some pods.
 - name: lifemonitor-data
   mountPath: "/var/data/lm"
 {{- end -}}
+
+
+{{/*
+Define the command to mirror the (cluster) local backup to a remote site via SFTP or FTPS
+*/}}
+{{- define "backup.remote.command" -}}
+{{- if and .Values.backup.remote .Values.backup.remote.enabled }}
+{{- if eq (.Values.backup.remote.protocol | lower) "sftp" }}
+{{- printf "lftp -c \"open -u %s,%s sftp://%s; mirror -e -R /var/data/backup %s \""
+    .Values.backup.remote.user .Values.backup.remote.password
+    .Values.backup.remote.host .Values.backup.remote.path
+}}
+{{- else if eq (.Values.backup.remote.protocol | lower) "ftps" }}
+{{- printf "lftp -c \"%s %s open -u %s,%s ftp://%s; mirror -e -R /var/data/backup %s \""
+    "set ftp:ssl-auth TLS; set ftp:ssl-force true;"
+    "set ftp:ssl-protect-list yes; set ftp:ssl-protect-data yes;"
+    .Values.backup.remote.user .Values.backup.remote.password
+    .Values.backup.remote.host .Values.backup.remote.path
+}}
+{{- end }}
+{{- end }}
+{{- end }}
\ No newline at end of file
diff --git a/k8s/templates/backend-deployment.yaml b/k8s/templates/backend-deployment.yaml
index 098bb3282..14e13703e 100644
--- a/k8s/templates/backend-deployment.yaml
+++ b/k8s/templates/backend-deployment.yaml
@@ -38,7 +38,7 @@ spec:
         image: {{ include "chart.lifemonitor.image" . }}
         imagePullPolicy: {{ .Values.lifemonitor.imagePullPolicy }}
         command: ["/bin/sh","-c"]
-        args: ["wait-for-redis.sh && wait-for-postgres.sh && flask init wait-for-db"]
+        args: ["wait-for-redis.sh && wait-for-postgres.sh && ./lm-admin.py db wait-for-db"]
         env:
         {{- include "lifemonitor.common-env" . | nindent 12 }}
         volumeMounts:
diff --git a/k8s/templates/job-backup.yaml b/k8s/templates/job-backup.yaml
new file mode 100644
index 000000000..67de94b27
--- /dev/null
+++ b/k8s/templates/job-backup.yaml
@@ -0,0 +1,51 @@
+{{- if .Values.backup.enabled -}}
+apiVersion: batch/v1beta1
+kind: CronJob
+metadata:
+  name: {{ include "chart.fullname" . }}-backup
+  labels:
+    app.kubernetes.io/name: {{ include "chart.name" . }}
+    app.kubernetes.io/instance: {{ .Release.Name }}
+    app.kubernetes.io/managed-by: {{ .Release.Service }}
+spec:
+  schedule: "{{ .Values.backup.schedule }}"
+  successfulJobsHistoryLimit: {{ .Values.backup.successfulJobsHistoryLimit }}
+  failedJobsHistoryLimit: {{ .Values.backup.failedJobsHistoryLimit }}
+  jobTemplate:
+    spec:
+      template:
+        spec:
+          containers:
+          - name: lifemonitor-backup
+            image: {{ include "chart.lifemonitor.image" . }}
+            imagePullPolicy: {{ .Values.lifemonitor.imagePullPolicy }}
+            command: ["/bin/bash","-c"]
+            args:
+              - wait-for-redis.sh && wait-for-postgres.sh ;
+                ./lm-admin.py backup ;
+            env:
+            {{- include "lifemonitor.common-env" . | nindent 12 }}
+            volumeMounts:
+            {{- include "lifemonitor.common-volume-mounts" . | nindent 12 }}
+            - name: lifemonitor-backup
+              mountPath: "/var/data/backup"
+          restartPolicy: OnFailure
+          volumes:
+          {{- include "lifemonitor.common-volume" . | nindent 10 }}
+          - name: lifemonitor-backup
+            persistentVolumeClaim:
+              claimName: {{ .Values.backup.existingClaim }}
+          {{- with .Values.lifemonitor.nodeSelector }}
+          nodeSelector:
+            {{- toYaml . | nindent 10 }}
+          {{- end }}
+          {{- with .Values.lifemonitor.affinity }}
+          affinity:
+            {{- toYaml . | nindent 10 }}
+          {{- end }}
+          {{- with .Values.lifemonitor.tolerations }}
+          tolerations:
+            {{- toYaml . | nindent 10 }}
+          {{- end }}
+      backoffLimit: 4
+{{- end }}
\ No newline at end of file
diff --git a/k8s/templates/job-init.yaml b/k8s/templates/job-init.yaml
index ead9c3fc5..a82e1b4f8 100644
--- a/k8s/templates/job-init.yaml
+++ b/k8s/templates/job-init.yaml
@@ -20,7 +20,7 @@ spec:
       image: {{ include "chart.lifemonitor.image" . }}
       imagePullPolicy: {{ .Values.lifemonitor.imagePullPolicy }}
       command: ["/bin/sh","-c"]
-      args: ["wait-for-redis.sh && wait-for-postgres.sh && flask init db && flask task-queue reset"]
+      args: ["wait-for-redis.sh && wait-for-postgres.sh && ./lm-admin.py db init && ./lm-admin.py task-queue reset"]
      env:
      {{- include "lifemonitor.common-env" . | nindent 10 }}
      volumeMounts:
diff --git a/k8s/templates/secret.yaml b/k8s/templates/secret.yaml
index 5bee20989..b1868307a 100644
--- a/k8s/templates/secret.yaml
+++ b/k8s/templates/secret.yaml
@@ -69,6 +69,22 @@ stringData:
   MAIL_USE_SSL={{- if .Values.mail.ssl -}}True{{- else -}}False{{- end }}
   MAIL_DEFAULT_SENDER={{ .Values.mail.default_sender }}

+  {{- if .Values.backup.enabled }}
+  # Backups
+  BACKUP_LOCAL_PATH="/var/data/backup"
+  {{- if .Values.backup.retain_days }}
+  BACKUP_RETAIN_DAYS={{ .Values.backup.retain_days }}
+  {{- end }}
+  {{- if .Values.backup.remote.enabled }}
+  BACKUP_REMOTE_PATH={{ .Values.backup.remote.path }}
+  BACKUP_REMOTE_HOST={{ .Values.backup.remote.host }}
+  BACKUP_REMOTE_USER={{ .Values.backup.remote.user }}
+  BACKUP_REMOTE_PASSWORD={{ .Values.backup.remote.password }}
+  BACKUP_REMOTE_ENABLE_TLS={{- if .Values.backup.remote.tls -}}True{{- else -}}False{{- end }}
+  {{- end }}
+  {{- end }}
+
+
   # Set admin credentials
   LIFEMONITOR_ADMIN_PASSWORD={{ .Values.lifemonitor.administrator.password }}
diff --git a/k8s/templates/worker-deployment.yaml b/k8s/templates/worker-deployment.yaml
index c0309e564..1c61bc484 100644
--- a/k8s/templates/worker-deployment.yaml
+++ b/k8s/templates/worker-deployment.yaml
@@ -35,7 +35,7 @@ spec:
       image: {{ include "chart.lifemonitor.image" . }}
       imagePullPolicy: {{ .Values.lifemonitor.imagePullPolicy }}
       command: ["/bin/sh","-c"]
-      args: ["wait-for-redis.sh && wait-for-postgres.sh && flask init wait-for-db"]
+      args: ["wait-for-redis.sh && wait-for-postgres.sh && ./lm-admin.py db wait-for-db"]
      env:
      {{- include "lifemonitor.common-env" . | nindent 12 }}
      volumeMounts:
diff --git a/k8s/values.yaml b/k8s/values.yaml
index 4a4fdd7ef..ab62d2a68 100644
--- a/k8s/values.yaml
+++ b/k8s/values.yaml
@@ -81,6 +81,24 @@ mail:
   ssl: true
   default_sender: ""

+# Backup settings
+backup:
+  enabled: false
+  schedule: "0 3 * * *"
+  retain_days: 30
+  successfulJobsHistoryLimit: 30
+  failedJobsHistoryLimit: 30
+  existingClaim: data-api-backup
+  # Settings to mirror the (cluster) local backup
+  # to a remote site via FTPS or SFTP
+  remote:
+    enabled: false
+    user: username
+    password: password
+    host: 10.0.1.135
+    path: /user/home/lm-backups
+    tls: true
+
 lifemonitor:
   replicaCount: 1
diff --git a/lifemonitor/commands/api_key.py b/lifemonitor/commands/api_key.py
index d61ee7356..d6c77d6fc 100644
--- a/lifemonitor/commands/api_key.py
+++ b/lifemonitor/commands/api_key.py
@@ -33,18 +33,21 @@
 # define the blueprint for DB commands
 blueprint = Blueprint('api-key', __name__)

+# set CLI help
+blueprint.cli.help = "Manage admin API keys"
+

 @blueprint.cli.command('create')
-@click.argument("username")
-@click.option("--scope", "scope",  # type=click.Choice(ApiKey.SCOPES),
+@click.option("--scope", "scope",
               default="read", show_default=True)
 @click.option("--length", "length", default=40, type=int, show_default=True)
 @with_appcontext
-def api_key_create(username, scope="read", length=40):
+def api_key_create(scope="read", length=40):
     """
-    Create an API Key for a given user (identified by username)
+    Create an API Key for the 'admin' user
     """
-    logger.debug("Finding User '%s'...", username)
+    username = "admin"
+    logger.debug("Finding user '%s'...", username)
     user = User.find_by_username(username)
     if not user:
         print("User not found", file=sys.stderr)
@@ -52,25 +55,25 @@ def api_key_create(scope="read", length=40):
     logger.debug("User found: %r", user)
     api_key = generate_new_api_key(user, scope, length)
     print("%r" % api_key)
-    logger.debug("ApiKey created")
+    logger.debug("Api key created")


 @blueprint.cli.command('list')
-@click.argument("username")
 @with_appcontext
-def api_key_list(username):
+def api_key_list():
     """
-    Create an API Key for a given user (identified by username)
+    List the API Keys of the 'admin' user
     """
-    logger.debug("Finding User '%s'...", username)
+    username = "admin"
+    logger.debug("Finding user '%s'...", username)
     user = User.find_by_username(username)
     if not user:
         print("User not found", file=sys.stderr)
         sys.exit(99)
     logger.debug("User found: %r", user)
-    logger.info('-' * 82)
-    logger.info("User '%s' ApiKeys", user.username)
-    logger.info('-' * 82)
+    print('-' * 82)
+    print("Api keys of user '%s'" % user.username)
+    print('-' * 82)
     for key in user.api_keys:
         print(key)
@@ -80,27 +83,27 @@
 @with_appcontext
 def api_key_delete(api_key):
     """
-    Create an API Key for a given user (identified by username)
+    Delete a given API Key
     """
-    logger.debug("Finding ApiKey '%s'...", api_key)
+    logger.debug("Finding Api key '%s'...", api_key)
     key = ApiKey.find(api_key)
     if not key:
-        print("ApiKey not found", file=sys.stderr)
+        print("Api key not found", file=sys.stderr)
         sys.exit(99)
-    logger.debug("ApiKey found: %r", key)
+    logger.debug("Api key found: %r", key)
     key.delete()
-    print("ApiKey '%s' deleted!" % api_key)
-    logger.debug("ApiKey created")
+    print("Api key '%s' deleted!" % api_key)
+    logger.debug("Api key deleted")


 @blueprint.cli.command('clean')
-@click.argument("username")
 @with_appcontext
-def api_key_clean(username):
+def api_key_clean():
     """
-    Create an API Key for a given user (identified by username)
+    Delete all API Keys of the 'admin' user
     """
-    logger.debug("Finding User '%s'...", username)
+    username = "admin"
+    logger.debug("Finding user '%s'...", username)
     user = User.find_by_username(username)
     if not user:
         print("User not found", file=sys.stderr)
@@ -109,7 +112,7 @@ def api_key_clean():
     count = 0
     for key in user.api_keys:
         key.delete()
-        print("ApiKey '%s' deleted!" % key.key)
+        print("Api key '%s' deleted!" % key.key)
         count += 1
     print("%d ApiKeys deleted!" % count, file=sys.stderr)
-    logger.debug("ApiKeys of User '%s' deleted!", user.username)
+    logger.debug("ApiKeys of user '%s' deleted!", user.username)
diff --git a/lifemonitor/commands/backup.py b/lifemonitor/commands/backup.py
new file mode 100644
index 000000000..1b0b11471
--- /dev/null
+++ b/lifemonitor/commands/backup.py
@@ -0,0 +1,216 @@
+# Copyright (c) 2022 CRS4
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
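+
+# This module implements the 'backup' CLI group: 'backup db' and
+# 'backup crates' back up the database and the registered RO-Crates, while
+# the bare 'backup' command runs the automatic mode driven by the
+# BACKUP_LOCAL_PATH, BACKUP_RETAIN_DAYS and BACKUP_REMOTE_* settings.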
+
+
+import logging
+import os
+import subprocess
+import sys
+import time
+from pathlib import Path
+
+import click
+from click_option_group import GroupedOption, optgroup
+from flask import current_app
+from flask.blueprints import Blueprint
+from flask.cli import with_appcontext
+from flask.config import Config
+from lifemonitor.utils import FtpUtils
+
+from .db import backup, backup_options
+
+# set module level logger
+logger = logging.getLogger()
+
+# define the blueprint for the backup commands
+_blueprint = Blueprint('backup', __name__)
+
+# set help for the CLI command
+_blueprint.cli.help = "Manage backups of database and RO-Crates"
+
+
+class RequiredIf(GroupedOption):
+    def __init__(self, *args, **kwargs):
+        self.required_if = kwargs.pop('required_if')
+        assert self.required_if, "'required_if' parameter required"
+        kwargs['help'] = ("%s (NOTE: This argument is required if '%s' is True)" %
+                          (kwargs.get('help', ''), self.required_if)).strip()
+        super(RequiredIf, self).__init__(*args, **kwargs)
+
+    def handle_parse_result(self, ctx, opts, args):
+        we_are_present = self.name in opts
+        other_present = self.required_if in opts
+
+        if other_present:
+            if not we_are_present:
+                raise click.UsageError(
+                    "Illegal usage: '%s' is required when '%s' is True" % (
+                        self.name, self.required_if))
+            else:
+                self.prompt = None
+
+        return super(RequiredIf, self).handle_parse_result(
+            ctx, opts, args)
+
+
+def synch_options(func):
+    func = optgroup.option('--enable-tls', default=False, is_flag=True, show_default=True,
+                           help="Enable FTP over TLS")(func)
+    func = optgroup.option('-t', '--target', default="/", show_default=True,
+                           help="Remote target path")(func)
+    func = optgroup.option('-p', '--password', cls=RequiredIf, required_if='synch',
+                           help="Password of the FTP account")(func)
+    func = optgroup.option('-u', '--user', cls=RequiredIf, required_if='synch',
+                           help="Username of the FTP account")(func)
+    func = optgroup.option('-h', '--host', cls=RequiredIf, required_if='synch',
+                           help="Hostname of the FTP server")(func)
+    func = optgroup.group('\nSettings to connect with a remote site via FTP or FTPS')(func)
+    func = click.option('-s', '--synch', default=False, show_default=True,
+                        is_flag=True, help="Enable synch with a remote FTP/FTPS server")(func)
+    return func
+
+
+def __remote_synch__(source: str, target: str,
+                     host: str, user: str, password: str,
+                     enable_tls: bool):
+    try:
+        ftp_utils = FtpUtils(host, user, password, enable_tls)
+        ftp_utils.sync(source, target)
+        print("Synch of local '%s' with remote '%s' completed!" % (source, target))
+        return 0
+    except Exception as e:
+        logger.debug(e)
+        print("Unable to synch with the remote site. ERROR: %s" % str(e))
+        return 1
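+
+
+# CLI group 'backup': invoked without a subcommand it runs the automatic
+# mode (the 'auto' function below); 'backup db' and 'backup crates' run
+# the individual steps.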
ERROR: %s" % str(e)) + return 1 + + +@_blueprint.cli.group(name="backup", invoke_without_command=True) +@with_appcontext +@click.pass_context +def bck(ctx): + if not ctx.invoked_subcommand: + auto(current_app.config) + + +@bck.command("db") +@backup_options +@synch_otptions +@with_appcontext +def db_cmd(file, directory, verbose, *args, **kwargs): + """ + Make a backup of the database + """ + result = backup_db(directory, file, verbose, *args, **kwargs) + sys.exit(result) + + +def backup_db(directory, file=None, verbose=False, *args, **kwargs): + logger.debug(sys.argv) + result = backup(directory, file, verbose) + if result.returncode == 0: + synch = kwargs.pop('synch', False) + if synch: + return __remote_synch__(source=directory, **kwargs) + return result.returncode + + +@bck.command("crates") +@click.option("-d", "--directory", default="./", show_default=True, + help="Local path to store RO-Crates") +@synch_otptions +@with_appcontext +def crates_cmd(directory, *args, **kwargs): + """ + Make a backup of the registered workflow RO-Crates + """ + result = backup_crates(current_app.config, directory, *args, **kwargs) + sys.exit(result) + + +def backup_crates(config, directory, *args, **kwargs): + assert config.get("DATA_WORKFLOWS", None), "DATA_WORKFLOWS not configured" + rocrate_source_path = config.get("DATA_WORKFLOWS").removesuffix('/') + os.makedirs(directory, exist_ok=True) + result = subprocess.run(f'rsync -avh --delete {rocrate_source_path}/ {directory} ', shell=True, capture_output=True) + if result.returncode == 0: + print("Created backup of workflow RO-Crates @ '%s'" % directory) + synch = kwargs.pop('synch', False) + if synch: + logger.debug("Remaining args: %r", kwargs) + return __remote_synch__(source=directory, **kwargs) + else: + print("Unable to backup workflow RO-Crates\n%s", result.stderr.decode()) + return result.returncode + + +def auto(config: Config): + logger.debug("Current app config: %r", config) + base_path = config.get("BACKUP_LOCAL_PATH", None) + if not base_path: + click.echo("No BACKUP_LOCAL_PATH found in your settings") + sys.exit(0) + + # set paths + base_path = base_path.removesuffix('/') # remove trailing '/' + db_backups = f"{base_path}/db" + rc_backups = f"{base_path}/crates" + logger.debug("Backup paths: %r - %r - %r", base_path, db_backups, rc_backups) + # backup database + result = backup(db_backups) + if result.returncode != 0: + sys.exit(result.returncode) + # backup crates + result = backup_crates(config, rc_backups) + if result != 0: + sys.exit(result) + # clean up old files + retain_days = int(config.get("BACKUP_RETAIN_DAYS", -1)) + logger.debug("RETAIN DAYS: %d", retain_days) + if retain_days > -1: + now = time.time() + for file in Path(db_backups).glob('*'): + if file.is_file(): + logger.debug("Check st_mtime of file %s: %r < %r", + file.absolute(), os.path.getmtime(file), now - int(retain_days) * 86400) + if os.path.getmtime(file) < now - int(retain_days) * 86400: + logger.debug("Removing %s", file.absolute()) + os.remove(file.absolute()) + logger.info("File %s removed from remote site", file.absolute()) + # synch with a remote site + if config.get("BACKUP_REMOTE_PATH", None): + # check REMOTE_* params + required_params = ["BACKUP_REMOTE_PATH", "BACKUP_REMOTE_HOST", + "BACKUP_REMOTE_USER", "BACKUP_REMOTE_PASSWORD", + "BACKUP_REMOTE_ENABLE_TLS"] + for p in required_params: + if not config.get(p, None): + print(f"Missing '{p}' on your settings!") + print("Required params are: %s", ", ".join(required_params)) + sys.exit(128) + 
+        __remote_synch__(base_path, config.get("BACKUP_REMOTE_PATH"),
+                         config.get("BACKUP_REMOTE_HOST"),
+                         config.get("BACKUP_REMOTE_USER"), config.get("BACKUP_REMOTE_PASSWORD"),
+                         config.get("BACKUP_REMOTE_ENABLE_TLS", False))
+    else:
+        logger.warning("Remote backup not configured")
+
+
+# export backup command
+commands = [bck]
diff --git a/lifemonitor/commands/cache.py b/lifemonitor/commands/cache.py
index 36180b668..35d43a9af 100644
--- a/lifemonitor/commands/cache.py
+++ b/lifemonitor/commands/cache.py
@@ -29,6 +29,9 @@
 # define the blueprint for DB commands
 blueprint = Blueprint('cache', __name__)

+# set help for the CLI command
+blueprint.cli.help = "Manage cache"
+

 @blueprint.cli.command('clear')
 @with_appcontext
diff --git a/lifemonitor/commands/db.py b/lifemonitor/commands/db.py
index d4114f424..adf3d510b 100644
--- a/lifemonitor/commands/db.py
+++ b/lifemonitor/commands/db.py
@@ -20,30 +20,37 @@

 import logging
+import os
+import subprocess
+import sys
+from datetime import datetime

 import click
 from flask import current_app
-from flask.blueprints import Blueprint
 from flask.cli import with_appcontext
-from flask_migrate import current, stamp, upgrade
+from flask_migrate import cli, current, stamp, upgrade
 from lifemonitor.auth.models import User
+from lifemonitor.utils import hide_secret

 # set module level logger
 logger = logging.getLogger()

-# define the blueprint for DB commands
-blueprint = Blueprint('init', __name__)
+# export from this module
+commands = [cli.db]
+
+# update help for the DB command
+cli.db.help = "Manage database"

 # set initial revision number
 initial_revision = '8b2e530dc029'


-@blueprint.cli.command('db')
+@cli.db.command()
 @click.option("-r", "--revision", default="head")
 @with_appcontext
-def init_db(revision):
+def init(revision):
     """
-    Initialize LifeMonitor App
+    Initialize the app database
     """
     from lifemonitor.db import create_db, db, db_initialized, db_revision
@@ -77,11 +84,11 @@
     db.session.commit()


-@blueprint.cli.command('wait-for-db')
+@cli.db.command()
 @with_appcontext
 def wait_for_db():
     """
-    Wait until that DB is initialized
+    Wait until the DBMS service is up and running
     """
     from lifemonitor.db import db_initialized, db_revision
@@ -94,3 +101,162 @@
     while current_revision is None:
         current_revision = db_revision()
         logger.info(f"Current revision: {current_revision}")
+
+
+# define common options
+verbose_option = click.option("-v", "--verbose", default=False, is_flag=True, help="Enable verbose mode")
+
+
+def backup_options(func):
+    # backup command options (evaluated in reverse order!)
+    func = verbose_option(func)
+    func = click.option("-f", "--file", default=None, help="Backup filename (default 'yyyymmdd_hhmmss.tar')")(func)
+    func = click.option("-d", "--directory", default="./", help="Directory path for the backup file (default '.')")(func)
+    return func
+
+
+@cli.db.command("backup")
+@backup_options
+@with_appcontext
+def backup_cmd(directory, file, verbose):
+    """
+    Make a backup of the current app database
+    """
+    result = backup(directory, file, verbose)
+    # report exit code to the main process
+    sys.exit(result.returncode)
+
+
+def backup(directory, file=None, verbose=False) -> subprocess.CompletedProcess:
+    """
+    Make a backup of the current app database
+    """
+    logger.debug("%r - %r - %r", file, directory, verbose)
+    from lifemonitor.db import db_connection_params
+    params = db_connection_params()
+    if not file:
+        file = f"{datetime.now().strftime('%Y%m%d_%H%M%S')}.tar"
+    os.makedirs(directory, exist_ok=True)
+    target_path = os.path.join(directory, file)
+    cmd = f"PGPASSWORD={params['password']} pg_dump -h {params['host']} -U {params['user']} -F t {params['dbname']} > {target_path}"
+    if verbose:
+        print("Output file: %s" % target_path)
+        print("Backup command: %s" % hide_secret(cmd, params['password']))
+    result = subprocess.run(cmd, shell=True, capture_output=True)
+    logger.debug("Backup result: %r", hide_secret(result, params['password']))
+    if result.returncode == 0:
+        msg = f"Created backup of database {params['dbname']} @ {target_path}"
+        logger.debug(msg)
+        print(msg)
+    else:
+        click.echo("\nERROR: Unable to backup the database: %s" % result.stderr.decode())
+        if verbose and result.stderr:
+            print("ERROR [stderr]: %s" % result.stderr.decode())
+    return result
+
+
+@cli.db.command()
+@click.argument("file")
+@click.option("-s", "--safe", default=False, is_flag=True,
+              help="Preserve the current database, renaming it with a '_yyyymmdd_hhmmss' suffix")
+@verbose_option
+@with_appcontext
+def restore(file, safe, verbose):
+    """
+    Restore a backup of the app database
+    """
+    from lifemonitor.db import (create_db, db_connection_params, db_exists,
+                                drop_db, rename_db)
+
+    # check if the backup file exists
+    if not os.path.isfile(file):
+        print("File '%s' not found!" % file)
+        sys.exit(128)
+    # check whether to delete or preserve the current app database (if it exists)
+    new_db_name = None
+    params = db_connection_params()
+    db_copied = False
+    if db_exists(params['dbname']):
+        if safe:
+            answer = input(f"The database '{params['dbname']}' will be renamed. Continue? (y/n): ")
+            if not answer.lower() in ('y', 'yes'):
+                sys.exit(0)
+        else:
+            answer = input(f"The database '{params['dbname']}' will be deleted. Continue? (y/n): ")
(y/n): ") + if not answer.lower() in ('y', 'yes'): + sys.exit(0) + # create a snapshot of the current database + new_db_name = f"{params['dbname']}_{datetime.now().strftime('%Y%m%d_%H%M%S')}" + rename_db(params['dbname'], new_db_name) + db_copied = True + msg = f"Created a DB snapshot: data '{params['dbname']}' temporarily renamed as '{new_db_name}'" + logger.debug(msg) + if verbose: + print(msg) + # restore database + create_db(current_app.config) + cmd = f"PGPASSWORD={params['password']} pg_restore -h {params['host']} -U {params['user']} -d {params['dbname']} -v {file}" + if verbose: + print("Dabaset file: %s" % file) + print("Backup command: %s" % hide_secret(cmd, params['password'])) + result = subprocess.run(cmd, shell=True) + logger.debug("Restore result: %r", hide_secret(cmd, params['password'])) + if result.returncode == 0: + if db_copied and safe: + print(f"Existing database '{params['dbname']}' renamed as '{new_db_name}'") + msg = f"Backup {file} restored to database '{params['dbname']}'" + logger.debug(msg) + print(msg) + # if mode is set to 'not safe' + # delete the temp snapshot of the current database + if not safe: + drop_db(db_name=new_db_name) + msg = f"Current database '{params['dbname']}' deleted" + logger.debug(msg) + if verbose: + print(msg) + else: + # if any error occurs + # restore the previous latest version of the DB + # previously saved as temp snapshot + if new_db_name: + # delete the db just created + drop_db() + # restore the old database snapshot + rename_db(new_db_name, params['dbname']) + db_copied = True + msg = f"Database restored '{params['dbname']}' renamed as '{new_db_name}'" + logger.debug(msg) + if verbose: + print(msg) + print("ERROR: Unable to restore the database backup") + if verbose and result.stderr: + print("ERROR [stderr]: %s" % result.stderr.decode()) + # report exit code to the main process + sys.exit(result.returncode) + + +@cli.db.command() +@click.argument("snapshot", default="current") +@with_appcontext +def drop(snapshot): + """ + Drop (a snapshot of) the app database. + + A snapshot is specified by a datetime formatted as yyyymmdd_hhmmss: e.g., 20220324_100137. + + If no snaphot is provided the current app database will be removed. + """ + from lifemonitor.db import db_connection_params, drop_db + db_name = db_connection_params()['dbname'] + if snapshot and snapshot != "current": + if not snapshot.startswith(db_name): + db_name = f"{db_name}_{snapshot}" + else: + db_name = snapshot + answer = input(f"The database '{db_name}' will be removed. Are you sure? 
(y/n): ") + if answer.lower() in ('y', 'yes'): + drop_db(db_name=db_name) + print(f"Database '{db_name}' removed") + else: + print("Database deletion aborted") diff --git a/lifemonitor/commands/oauth.py b/lifemonitor/commands/oauth.py index 04692e6e7..f69add16e 100644 --- a/lifemonitor/commands/oauth.py +++ b/lifemonitor/commands/oauth.py @@ -33,6 +33,9 @@ # define the blueprint for DB commands blueprint = Blueprint('oauth', __name__) +# set CLI help +blueprint.cli.help = "Manage credentials for OAuth2 clients" + def invalidate_token(token): invalid_token = token.copy() diff --git a/lifemonitor/commands/registry.py b/lifemonitor/commands/registry.py index 40ab39df8..b01a3f515 100644 --- a/lifemonitor/commands/registry.py +++ b/lifemonitor/commands/registry.py @@ -33,6 +33,9 @@ # define the blueprint for DB commands blueprint = Blueprint('registry', __name__) +# set CLI help +blueprint.cli.help = "Manage workflow registries" + # instance of LifeMonitor service lm = LifeMonitor.get_instance() diff --git a/lifemonitor/commands/tasks.py b/lifemonitor/commands/tasks.py index ef35a4a96..b2ab57823 100644 --- a/lifemonitor/commands/tasks.py +++ b/lifemonitor/commands/tasks.py @@ -29,6 +29,9 @@ # define the blueprint for DB commands blueprint = Blueprint('task-queue', __name__) +# set CLI help +blueprint.cli.help = "Manage task queue" + @blueprint.cli.command('reset') @with_appcontext diff --git a/lifemonitor/config.py b/lifemonitor/config.py index 24fca28ff..9a6946b09 100644 --- a/lifemonitor/config.py +++ b/lifemonitor/config.py @@ -18,6 +18,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +import configparser import logging import os from logging.config import dictConfig @@ -130,6 +131,17 @@ class TestingSupportConfig(TestingConfig): _config_by_name = {cfg.CONFIG_NAME: cfg for cfg in _EXPORT_CONFIGS} +def get_config(settings=None): + # set app env + app_env = os.environ.get("FLASK_ENV", "production") + if app_env != 'production': + # Set the DEBUG_METRICS env var to also enable the + # prometheus metrics exporter when running in development mode + os.environ['DEBUG_METRICS'] = 'true' + # load app config + return get_config_by_name(app_env, settings=settings) + + def get_config_by_name(name, settings=None): try: config = type(f"AppConfigInstance{name}".title(), (_config_by_name[name],), {}) @@ -152,6 +164,70 @@ def get_config_by_name(name, settings=None): return ProductionConfig +class LogFilter(logging.Filter): + def __init__(self, param=None): + self.param = param + try: + config = configparser.ConfigParser() + config.read('setup.cfg') + self.filters = [_.strip() for _ in config.get('logging', 'filters').split(',')] + except Exception: + self.filters = [] + + def filter(self, record): + try: + filtered = False + for k in self.filters: + if k in record.name: + filtered = True + if not filtered: + if 'Requester' in record.name: + record.msg = "Request to Github API" + logger.debug("Request args: %r %r %r %r", record.args[0], record.args[1], record.args[2], record.args[3]) + record.args = None + except Exception as e: + logger.exception(e) + return not filtered + + +BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8) + +# These are the sequences need to get colored ouput +RESET_SEQ = "\033[0m" +COLOR_SEQ = "\033[1;%dm" +BOLD_SEQ = "\033[1m" + + +def formatter_message(message, use_color=True): + if use_color: + message = message.replace("$RESET", RESET_SEQ).replace("$BOLD", BOLD_SEQ) + else: + message = message.replace("$RESET", 
"").replace("$BOLD", "") + return message + + +COLORS = { + 'WARNING': YELLOW, + 'INFO': WHITE, + 'DEBUG': BLUE, + 'CRITICAL': YELLOW, + 'ERROR': RED +} + + +class ColorFormatter(logging.Formatter): + def __init__(self, format, use_color=True): + logging.Formatter.__init__(self, format) + self.use_color = use_color + + def format(self, record): + levelname = record.levelname + if self.use_color and levelname in COLORS: + record.levelname = f"{COLOR_SEQ % (30 + COLORS[levelname])}{levelname}{RESET_SEQ}" + record.module = f"{COLOR_SEQ % (30 + COLORS[levelname])}{record.module}{RESET_SEQ}" + return logging.Formatter.format(self, record) + + def configure_logging(app): level_str = app.config.get('LOG_LEVEL', 'INFO') error = False @@ -164,12 +240,21 @@ def configure_logging(app): dictConfig({ 'version': 1, 'formatters': {'default': { - 'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s', + '()': ColorFormatter, + 'format': + f'[{COLOR_SEQ % (90)}%(asctime)s{RESET_SEQ}] %(levelname)s in %(module)s: {COLOR_SEQ % (90)}%(message)s{RESET_SEQ}', }}, + 'filters': { + 'myfilter': { + '()': LogFilter, + # 'param': '', + } + }, 'handlers': {'wsgi': { 'class': 'logging.StreamHandler', 'stream': 'ext://flask.logging.wsgi_errors_stream', - 'formatter': 'default' + 'formatter': 'default', + 'filters': ['myfilter'] }}, 'response': { 'level': logging.INFO, diff --git a/lifemonitor/db.py b/lifemonitor/db.py index 77411633d..94e784a97 100644 --- a/lifemonitor/db.py +++ b/lifemonitor/db.py @@ -154,11 +154,25 @@ def create_db(settings=None, drop=False): logger.debug('DB %s created.', new_db_name.string) -def drop_db(settings=None): +def rename_db(old_name: str, new_name: str, settings=None): + + db.engine.dispose() + con = db_connect(settings=settings, override_db_name='postgres') + try: + con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) + with con.cursor() as cur: + cur.execute(f'ALTER DATABASE {old_name} RENAME TO {new_name}') + finally: + con.close() + + logger.debug('DB %s renamed to %s.', old_name, new_name) + + +def drop_db(db_name: str = None, settings=None): """Clear existing data and create new tables.""" - actual_db_name = get_db_connection_param("POSTGRESQL_DATABASE", settings) + actual_db_name = db_name or get_db_connection_param("POSTGRESQL_DATABASE", settings) logger.debug("Actual DB name: %r", actual_db_name) - + db.engine.dispose() con = db_connect(settings=settings, override_db_name='postgres') try: con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) diff --git a/lifemonitor/static/src/package.json b/lifemonitor/static/src/package.json index 1b2f658d9..f5d2cfb7d 100644 --- a/lifemonitor/static/src/package.json +++ b/lifemonitor/static/src/package.json @@ -1,7 +1,7 @@ { "name": "lifemonitor", "description": "Workflow Testing Service", - "version": "0.7.1", + "version": "0.7.2", "license": "MIT", "author": "CRS4", "main": "../dist/js/lifemonitor.min.js", diff --git a/lifemonitor/utils.py b/lifemonitor/utils.py index da0868932..4e8ab5224 100644 --- a/lifemonitor/utils.py +++ b/lifemonitor/utils.py @@ -20,6 +20,7 @@ import base64 +import ftplib import functools import glob import json @@ -34,6 +35,7 @@ import urllib import uuid import zipfile +from datetime import datetime from importlib import import_module from os.path import basename, dirname, isfile, join from typing import List @@ -41,6 +43,7 @@ import flask import requests import yaml +from dateutil import parser from . 
@@ -131,6 +134,11 @@ def sizeof_fmt(num, suffix='B'):
     return "%.1f%s%s" % (num, 'Yi', suffix)


+def hide_secret(text: str, secret: str, replace_with="*****") -> str:
+    text = str(text) if not isinstance(text, str) else text
+    return text if not text else text.replace(secret, replace_with)
+
+
 def decodeBase64(str, as_object=False, encoding='utf-8'):
     result = base64.b64decode(str)
     if not result:
@@ -505,3 +513,145 @@ def encode_object(cls, obj: object) -> str:
     @classmethod
     def decode(cls, data: str) -> object:
         return base64.b64decode(data.encode())
+
+
+class FtpUtils():
+
+    def __init__(self, host, user, password, enable_tls) -> None:
+        self._ftp = None
+        self.host = host
+        self.user = user
+        self.passwd = password
+        self.tls_enabled = enable_tls
+        self._metadata_remote_files = {}
+
+    def __del__(self):
+        if self._ftp:
+            try:
+                logger.debug("Closing remote connection...")
+                self._ftp.close()
+                logger.debug("Closing remote connection... DONE")
+            except Exception as e:
+                logger.debug(e)
+
+    @property
+    def ftp(self) -> ftplib.FTP_TLS:
+        if not self._ftp:
+            cls = ftplib.FTP_TLS if self.tls_enabled else ftplib.FTP
+            self._ftp = cls(self.host)
+            self._ftp.login(self.user, self.passwd)
+        return self._ftp
+
+    def is_dir(self, path) -> bool:
+        """ Check whether a remote path is a directory """
+        cwd = self.ftp.pwd()
+        try:
+            self.ftp.cwd(path)
+            return True
+        except Exception:
+            return False
+        finally:
+            self.ftp.cwd(cwd)
+
+    def get_file_metadata(self, directory, filename, use_cache=False):
+        metadata = self._metadata_remote_files.get(directory, False) if use_cache else None
+        if not metadata:
+            metadata = [_ for _ in self.ftp.mlsd(directory)]
+            self._metadata_remote_files[directory] = metadata
+        for f in metadata:
+            if f[0] == filename:
+                fmeta = f[1]
+                logger.debug("File metadata: %r", fmeta)
+                return fmeta
+        return None
+
+    def sync(self, source, target):
+        for root, dirs, files in os.walk(source, topdown=True):
+            for name in dirs:
+                local_path = os.path.join(root, name)
+                logger.debug("Local directory path: %s", local_path)
+                remote_file_path = local_path.replace(source, target)
+                logger.debug("Remote directory path: %s", remote_file_path)
+                try:
+                    self.ftp.mkd(remote_file_path)
+                    logger.debug("Created remote directory: %s", remote_file_path)
+                except Exception as e:
+                    logger.debug("Unable to create remote directory: %s", remote_file_path)
+                    logger.debug(str(e))
+
+            for name in files:
+                local_path = os.path.join(root, name)
+                remote_file_path = f"{target}/{local_path.replace(source + '/', '')}"
+                logger.debug("Local filepath: %s", local_path)
+                logger.debug("Remote filepath: %s", remote_file_path)
+                upload_file = True
+                try:
+                    metadata = self.get_file_metadata(
+                        os.path.dirname(remote_file_path), name, use_cache=True)
+                    if metadata:
+                        timestamp = metadata['modify']
+                        remote_time = parser.parse(timestamp).isoformat(' ', 'seconds')
+                        local_time = datetime.utcfromtimestamp(os.path.getmtime(local_path)).isoformat(' ', 'seconds')
+                        logger.debug("Checking: %r - %r", remote_time, local_time)
+                        if local_time <= remote_time:
+                            upload_file = False
+                            logger.debug("File %s not changed... skip upload", remote_file_path)
+                        else:
+                            self.ftp.delete(remote_file_path)
+                            logger.debug("File %s changed... it needs to be re-uploaded", remote_file_path)
+                    else:
+                        logger.debug("File %s doesn't exist @ remote path %s", name, remote_file_path)
+                except Exception as e:
+                    if logger.isEnabledFor(logging.DEBUG):
+                        logger.exception(e)
+                if upload_file:
+                    with open(local_path, 'rb') as fh:
+                        self.ftp.storbinary('STOR %s' % remote_file_path, fh)
+                        logger.info("Local file '%s' uploaded on remote @ %s", local_path, remote_file_path)
+        # remove obsolete files on the remote target
+        self.remove_obsolete_remote_files(source, target)
+
+    def remove_obsolete_remote_files(self, source, target):
+        """ Remove obsolete files on the remote target """
+        for path in self.ftp.nlst(target):
+            logger.debug("Checking remote path: %r", path)
+            local_path = path.replace(target, source)
+            logger.debug("Local path corresponding to remote %s is: %s", path, local_path)
+            if self.is_dir(path):
+                logger.debug("Is dir: %s", path)
+                self.remove_obsolete_remote_files(local_path, path)
+                # remove the remote folder if empty
+                if len(self.ftp.nlst(path)) == 0:
+                    self.ftp.rmd(path)
+                    logger.debug("Removed remote folder '%s'", path)
+            else:
+                if not os.path.isfile(local_path):
+                    logger.debug("Removing remote file '%s'...", path)
+                    try:
+                        self.ftp.delete(path)
+                        logger.debug("Removed remote file '%s'", path)
+                    except Exception as e:
+                        logger.debug(e)
+                else:
+                    logger.debug("File %s exists @ %s", path, local_path)
+
+    def rm_tree(self, path):
+        """Recursively delete a directory tree on a remote server."""
+        try:
+            names = self.ftp.nlst(path)
+        except ftplib.all_errors as e:
+            logger.debug('Could not remove {0}: {1}'.format(path, e))
+            return
+
+        for name in names:
+            if os.path.split(name)[1] in ('.', '..'):
+                continue
+            logger.debug('Checking {0}'.format(name))
+            if self.is_dir(name):
+                self.rm_tree(name)
+            else:
+                self.ftp.delete(name)
+        try:
+            self.ftp.rmd(path)
+        except ftplib.all_errors as e:
+            logger.debug('Could not remove {0}: {1}'.format(path, e))
diff --git a/lm-admin.py b/lm-admin.py
new file mode 100755
index 000000000..e1ac8cf80
--- /dev/null
+++ b/lm-admin.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2022 CRS4
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
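+
+# Thin admin CLI: it delegates to the Flask application's Click CLI, so
+# './lm-admin.py db init' behaves like 'flask db init' run with the
+# LifeMonitor app context.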
+
+from app import application
+
+
+def main():
+    application.cli.main()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/requirements.txt b/requirements.txt
index 14b532e83..aac634e40 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,37 +1,40 @@
-Authlib~=0.15.3
-apscheduler==3.8.0
-connexion[swagger-ui]==2.9.0
-dramatiq[redis,watch]==1.11.0
+Authlib~=0.15.4
+apscheduler==3.8.1
+connexion[swagger-ui]~=2.11.2
+dramatiq[redis,watch]==1.12.3
 email-validator~=1.1.3
-flask-bcrypt==0.7.1
+Bcrypt-Flask==1.0.2
+click-option-group~=0.5.3
 flask-cors==3.0.10
 flask-marshmallow~=0.14.0
 flask-restful==0.3.9
 flask-login~=0.5.0
 flask-shell-ipython==0.4.1
-flask-wtf~=0.15.1
-Flask-APScheduler==1.12.2
+flask-wtf~=1.0.0
+Flask-APScheduler==1.12.3
 Flask-SQLAlchemy==2.5.1
 Flask-Migrate==3.1.0
 Flask-Mail~=0.9.1
-Flask>=1.1.4,<2.0.0
+Flask~=2.0.3
 gunicorn~=20.1.0
-jwt==1.2.0
+itsdangerous~=2.1.0
+jwt==1.3.1
 loginpass==0.5
-marshmallow-sqlalchemy~=0.26.1
+marshmallow-sqlalchemy~=0.27.0
 prometheus-flask-exporter>=0.18,<0.19
 psycopg2~=2.9.1
-pyopenssl==21.0.0
-pytest-mock~=3.6.1
-pytest~=6.2.5
+pyopenssl==22.0.0
+pytest-mock~=3.7.0
+pytest~=7.0.1
 python-dotenv~=0.19.0
 python-jenkins==1.7.0
 python-redis-lock~=3.7.0
 PyGithub~=1.55
 PyYAML~=5.4.1
 pika~=1.2.0
-redis~=3.5.3
-requests~=2.26.0
-rocrate==0.5.2
-SQLAlchemy~=1.3.23
-wheel~=0.37.0
+redis~=4.1.4
+requests~=2.27.1
+rocrate==0.5.5
+SQLAlchemy~=1.3.24
+wheel~=0.37.1
+Werkzeug~=2.0.0
diff --git a/settings.conf b/settings.conf
index fdaa9c056..834da225b 100644
--- a/settings.conf
+++ b/settings.conf
@@ -66,13 +66,24 @@ MAIL_USE_TLS=False
 MAIL_USE_SSL=True
 MAIL_DEFAULT_SENDER=''

+# Storage path of workflow RO-Crates
+# DATA_WORKFLOWS = "./data"
+
 # Cache settings
 CACHE_REDIS_DB=0
 CACHE_DEFAULT_TIMEOUT=300
 CACHE_REQUEST_TIMEOUT=15
 CACHE_SESSION_TIMEOUT=3600
 CACHE_WORKFLOW_TIMEOUT=1800
-CACHE_BUILD_TIMEOUT=84600
+
+# Backup settings
+BACKUP_LOCAL_PATH="./backups"
+BACKUP_RETAIN_DAYS=30
+# BACKUP_REMOTE_PATH="lm-backups"
+# BACKUP_REMOTE_HOST="ftp-site.domain.it"
+# BACKUP_REMOTE_USER="lm"
+# BACKUP_REMOTE_PASSWORD="foobar"
+# BACKUP_REMOTE_ENABLE_TLS=True

 # Github OAuth2 settings
 #GITHUB_CLIENT_ID="___YOUR_GITHUB_OAUTH2_CLIENT_ID___"
diff --git a/setup.cfg b/setup.cfg
index 9d913ef01..78e3f2167 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,3 +6,6 @@ VCS = git
 style = pep440
 versionfile_source = lifemonitor/_version.py
 tag_prefix =
+
+[logging]
+filters = connexion,validator
\ No newline at end of file
diff --git a/specs/api.yaml b/specs/api.yaml
index f340670a3..299a4d8b4 100644
--- a/specs/api.yaml
+++ b/specs/api.yaml
@@ -3,7 +3,7 @@
 openapi: "3.0.0"

 info:
-  version: "0.7.1"
+  version: "0.7.2"
   title: "Life Monitor API"
   description: |
     *Workflow sustainability service*
@@ -18,7 +18,7 @@ info:
 servers:
   - url: /
     description: >
-      Version 0.7.1 of API.
+      Version 0.7.2 of API.

 tags:
   - name: Registries
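
Usage sketch (not part of the patch): the commands above replace the old `flask init ...` entry points. Flags and paths follow the definitions in lifemonitor/commands/db.py and lifemonitor/commands/backup.py; the host and credentials below are placeholders.

    ./lm-admin.py db init                  # was: flask init db
    ./lm-admin.py db wait-for-db           # was: flask init wait-for-db
    ./lm-admin.py db backup -d backups/db
    ./lm-admin.py db restore backups/db/20220324_100137.tar --safe
    ./lm-admin.py backup                   # automatic mode driven by the BACKUP_* settings
    ./lm-admin.py backup crates -d backups/crates -s -h ftp.example.org -u lm -p secret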