diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..209f125cd8 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +.docker-compose.yml +Dockerfile +node_modules/ +.cache +.pytest_cache +aws-ip-ranges.json +.git +docs diff --git a/.elasticbeanstalk/.gitignore b/.elasticbeanstalk/.gitignore deleted file mode 100644 index bca646a711..0000000000 --- a/.elasticbeanstalk/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ - -# Elastic Beanstalk Files -.elasticbeanstalk/* -!.elasticbeanstalk/*.cfg.yml -!.elasticbeanstalk/*.global.yml diff --git a/.elasticbeanstalk/config.yml b/.elasticbeanstalk/config.yml deleted file mode 100644 index 39ce025a1c..0000000000 --- a/.elasticbeanstalk/config.yml +++ /dev/null @@ -1,20 +0,0 @@ -branch-defaults: - FF-687: - ec2_keyname: alexkb - environment: 4dn-web-alex-2 - aws_beanstalk: - environment: 4dn-web-prod - group_suffix: null - fix_404_no_auth: - environment: 4dn-web-alex - master: - environment: fourfront-cgap - production: - environment: fourfront-cgap -global: - application_name: 4dn-web - default_ec2_keyname: 4dn-encode - default_platform: Python 3.4 - default_region: us-east-1 - profile: null - sc: git diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 188a21662a..7589c19278 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -25,7 +25,7 @@ jobs: # Build matrix strategy: matrix: - test_type: ['UNIT', 'NPM'] + test_type: ['UNIT', 'NPM', 'Docker'] # Steps represent a sequence of tasks that will be executed as part of the job steps: @@ -42,6 +42,7 @@ jobs: check-latest: false - name: Install/Link Postgres + if: ${{ matrix.test_type == 'NPM' || matrix.test_type == 'UNIT' }} run: | sudo apt-get install curl ca-certificates gnupg curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - @@ -51,6 +52,7 @@ jobs: echo "/usr/lib/postgresql/11/bin" >> $GITHUB_PATH sudo ln -s /usr/lib/postgresql/11/bin/initdb /usr/local/bin/initdb - name: Install Deps + if: ${{ matrix.test_type == 'NPM' || matrix.test_type == 'UNIT' }} run: | node --version make build @@ -117,3 +119,7 @@ jobs: # Until the next version of snovault, the following two are prudent. We can remove them soon. -kmp 9-Mar-2021 poetry run wipe-test-indices $TRAVIS_JOB_ID search-cgap-testing-6-8-vo4mdkmkshvmyddc65ux7dtaou.us-east-1.es.amazonaws.com:443 poetry run wipe-test-indices cgap-test-$TRAVIS_JOB_ID search-cgap-testing-6-8-vo4mdkmkshvmyddc65ux7dtaou.us-east-1.es.amazonaws.com:443 + + - name: Docker Build + if: ${{ matrix.test_type == 'Docker' }} + run: make build-docker-local diff --git a/.gitignore b/.gitignore index 10d02188d8..fea42a1b4b 100644 --- a/.gitignore +++ b/.gitignore @@ -82,3 +82,8 @@ elasticsearch-*.deb # Used for some kinds of debugging in dcicutils, snovault, cgap & ff. DEBUGLOG-* + +# Elastic Beanstalk Files +.elasticbeanstalk/* +!.elasticbeanstalk/*.cfg.yml +!.elasticbeanstalk/*.global.yml diff --git a/CHANGES.rst b/CHANGES.rst deleted file mode 100644 index 6b772de229..0000000000 --- a/CHANGES.rst +++ /dev/null @@ -1,6 +0,0 @@ -Changes -======= - -0.1 (unreleased) ----------------- - diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000..c59b3d5d01 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,140 @@ +# CGAP-Portal (Production) Dockerfile +# Note that images are pinned via sha256 as opposed to tag +# so that we don't pick up new images unintentionally + +# Debian Buster with Python 3.6.13 +# TODO: maybe swap in ubuntu 20.04 and install Python manually? 
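+# (To check what a pinned digest corresponds to, one can pull the suspected tag --
+#  e.g. "docker pull python:3.6.13-buster" -- and compare "docker images --digests";
+#  the exact tag behind this digest is inferred from the comment above, not verified.)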
+FROM python@sha256:db248d2d0494973550d323dd6b82af7fc2f4c1e0365769a758abd7fac2aa70db + +MAINTAINER William Ronchetti "william_ronchetti@hms.harvard.edu" + +# Build Arguments +ARG INI_BASE +ENV INI_BASE=${INI_BASE:-"cgap_any_alpha.ini"} + +# Configure (global) Env +ENV NGINX_USER=nginx +ENV DEBIAN_FRONTEND=noninteractive +ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1 +ENV PYTHONFAULTHANDLER=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONHASHSEED=random \ + PIP_NO_CACHE_DIR=off \ + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + POETRY_VERSION=1.1.4 \ + NODE_VERSION=12.22.1 + +# Install nginx, base system +COPY deploy/docker/production/install_nginx.sh / +RUN bash /install_nginx.sh && \ + apt-get update && \ + apt-get install -y curl vim emacs postgresql-client net-tools ca-certificates + +# Configure CGAP User (nginx) +WORKDIR /home/nginx/.nvm + +# Install Node +ENV NVM_DIR=/home/nginx/.nvm +RUN apt install -y curl +RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.38.0/install.sh | bash +RUN . "$NVM_DIR/nvm.sh" && nvm install ${NODE_VERSION} +RUN . "$NVM_DIR/nvm.sh" && nvm use v${NODE_VERSION} +RUN . "$NVM_DIR/nvm.sh" && nvm alias default v${NODE_VERSION} +ENV PATH="/home/nginx/.nvm/versions/node/v${NODE_VERSION}/bin/:${PATH}" +RUN node --version +RUN npm --version + +WORKDIR /home/nginx + +# Configure venv +ENV VIRTUAL_ENV=/opt/venv +RUN python -m venv /opt/venv +ENV PATH="$VIRTUAL_ENV/bin:$PATH" + +# Upgrade pip, install in layer +RUN pip install --upgrade pip && \ + pip install poetry==1.1.4 + +# Adjust permissions +RUN chown -R nginx:nginx /opt/venv && \ + mkdir -p /home/nginx/cgap-portal + +WORKDIR /home/nginx/cgap-portal + +# Do the back-end dependency install +COPY pyproject.toml . +COPY poetry.lock . +RUN poetry install --no-root + +# Do the front-end dependency install +COPY package.json . +COPY package-lock.json . +RUN npm ci --no-fund --no-progress --no-optional --no-audit --python=/opt/venv/bin/python + +# Copy over the rest of the code +COPY . . + +# Build remaining back-end +RUN poetry install && \ + python setup_eb.py develop && \ + make fix-dist-info + +# Build front-end +RUN npm run build && \ + npm run build-scss + +# Misc +RUN make aws-ip-ranges && \ + cat /dev/urandom | head -c 256 | base64 > session-secret.b64 + +# Copy config files in (down here for quick debugging) +# Remove default configuration from Nginx +RUN rm /etc/nginx/nginx.conf && \ + rm /etc/nginx/conf.d/default.conf +COPY deploy/docker/production/nginx.conf /etc/nginx/nginx.conf + +# nginx filesystem setup +RUN chown -R nginx:nginx /var/cache/nginx && \ + chown -R nginx:nginx /var/log/nginx && \ + chown -R nginx:nginx /etc/nginx/conf.d && \ + touch /var/run/nginx.pid && \ + chown -R nginx:nginx /var/run/nginx.pid && \ + rm -f /var/log/nginx/* && \ + touch /var/log/nginx/access.log && \ + chown -R nginx:nginx /var/log/nginx/access.log && \ + touch /var/log/nginx/error.log && \ + chown -R nginx:nginx /var/log/nginx/error.log + +# Pull all required files +# Note that *.ini must match the env name in secrets manager! +# Note that deploy/docker/production/entrypoint.sh resolves which entrypoint to run +# based on env variable "application_type". +# For now, this is mastertest. 
- Will 04/29/21
+COPY deploy/docker/local/docker_development.ini development.ini
+COPY deploy/docker/local/entrypoint.sh entrypoint_local.sh
+RUN chown nginx:nginx development.ini
+RUN chmod +x entrypoint_local.sh
+
+# Production setup
+RUN touch production.ini
+RUN chown nginx:nginx production.ini
+COPY deploy/docker/production/$INI_BASE deploy/ini_files/.
+COPY deploy/docker/production/entrypoint.sh .
+COPY deploy/docker/production/entrypoint_portal.sh .
+COPY deploy/docker/production/entrypoint_deployment.sh .
+COPY deploy/docker/production/entrypoint_indexer.sh .
+COPY deploy/docker/production/entrypoint_ingester.sh .
+COPY deploy/docker/production/assume_identity.py .
+RUN chmod +x entrypoint.sh
+RUN chmod +x entrypoint_portal.sh
+RUN chmod +x entrypoint_deployment.sh
+RUN chmod +x entrypoint_indexer.sh
+RUN chmod +x entrypoint_ingester.sh
+RUN chmod +x assume_identity.py
+EXPOSE 8000
+
+# Container does not run as root
+USER nginx
+
+ENTRYPOINT ["/home/nginx/cgap-portal/entrypoint.sh"]
diff --git a/Makefile b/Makefile
index 46db912c3f..8c7e77c13f 100644
--- a/Makefile
+++ b/Makefile
@@ -172,6 +172,49 @@ remote-test-unit:  # Note this does the 'indexing' tests
 update:  # updates dependencies
 	poetry update
 
+build-docker-local:
+	docker-compose build
+
+build-docker-local-clean:
+	docker-compose build --no-cache BUILD_PATH=deploy/docker/local
+
+deploy-docker-local:
+	docker-compose up -V
+
+deploy-docker-local-daemon:
+	docker-compose up -d -V
+
+ENV_NAME ?= cgap-mastertest
+AWS_ACCOUNT ?= 645819926742
+
+ecr-login:
+	@echo "Making ecr-login AWS_ACCOUNT=${AWS_ACCOUNT} ..."
+	aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin ${AWS_ACCOUNT}.dkr.ecr.us-east-1.amazonaws.com
+
+rebuild-docker-production:
+	@echo "Remaking build-docker-production AWS_ACCOUNT=${AWS_ACCOUNT} ENV_NAME=${ENV_NAME} ..."
+	docker build -t ${ENV_NAME}:latest . --no-cache
+	make tag-and-push-docker-production
+
+build-docker-test:
+	# This will do the equivalent of
+	#   make ecr-login AWS_ACCOUNT=
+	#   make build-docker-production AWS_ACCOUNT= ENV_NAME=
+	# but it has to do the login inside the script; we can't do it separately here
+	# because it has to infer the correct AWS_ACCOUNT and ENV_NAME by nosing into
+	# ~/.aws_test/test_creds.sh looking for ACCOUNT_NUMBER (note: not AWS_ACCOUNT) and ENV_NAME.
+	scripts/build-docker-test --login  # The login must be done inside the script, after inferring account number
+
+build-docker-production:
+	@echo "Making build-docker-production AWS_ACCOUNT=${AWS_ACCOUNT} ENV_NAME=${ENV_NAME} ..."
+	docker build -t ${ENV_NAME}:latest .
+	make tag-and-push-docker-production ENV_NAME=${ENV_NAME} AWS_ACCOUNT=${AWS_ACCOUNT}
+
+tag-and-push-docker-production:
+	@echo "Making tag-and-push-docker-production AWS_ACCOUNT=${AWS_ACCOUNT} ENV_NAME=${ENV_NAME} ..."
+	docker tag ${ENV_NAME}:latest ${AWS_ACCOUNT}.dkr.ecr.us-east-1.amazonaws.com/${ENV_NAME}:latest
+	docker push ${AWS_ACCOUNT}.dkr.ecr.us-east-1.amazonaws.com/${ENV_NAME}:latest
+
 help:
 	@make info
 
@@ -199,3 +242,9 @@ info:
 	$(info - Use 'make test' to run tests with normal options similar to what we use on GitHub Actions.)
 	$(info - Use 'make test-any' to run tests without marker constraints (i.e., with no '-m' option).)
 	$(info - Use 'make update' to update dependencies (and the lock file).)
+	$(info - Use 'make build-docker-local' to build the local Docker image.)
+	$(info - Use 'make build-docker-local-clean' to build the local Docker image with no cache.)
+	$(info - Use 'make deploy-docker-local' to start up the cluster - pserve output will follow if successful.)
+	$(info - Use 'make deploy-docker-local-daemon' to start the cluster in daemon mode.)
+	$(info - Use 'make ecr-login' to login to ECR with the currently sourced AWS creds.)
+	$(info - Use 'make build-docker-production' to build/tag/push a production image.)
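
As an illustrative flow using the new targets (the account and environment values shown are the Makefile defaults; substitute your own)::

    make build-docker-local                   # docker-compose build
    make deploy-docker-local                  # docker-compose up; pserve output follows
    make ecr-login AWS_ACCOUNT=645819926742
    make build-docker-production ENV_NAME=cgap-mastertest AWS_ACCOUNT=645819926742
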
diff --git a/README.rst b/README.rst
index 84e5cf01ff..68195f0bfc 100644
--- a/README.rst
+++ b/README.rst
@@ -2,19 +2,7 @@
 CGAP PORTAL (HMS-BGM)
 ========================
 
-.. image:: https://travis-ci.org/dbmi-bgm/cgap-portal.svg?branch=master
-    :target: https://travis-ci.org/dbmi-bgm/cgap-portal
-
-|Coverage|_
-
-.. |Coverage| image:: https://coveralls.io/repos/github/4dn-dcic/fourfront/badge.svg?branch=master
-.. _Coverage: https://coveralls.io/github/4dn-dcic/fourfront?branch=master
-
-|Quality|_
-
-.. |Quality| image:: https://api.codacy.com/project/badge/Grade/f5fc54006b4740b5800e83eb2aeeeb43
-.. _Quality: https://www.codacy.com/app/4dn/fourfront?utm_source=github.com&utm_medium=referral&utm_content=4dn-dcic/fourfront&utm_campaign=Badge_Grade
-
+.. image:: https://github.com/dbmi-bgm/cgap-portal/actions/workflows/main.yml/badge.svg
 
 .. image:: https://readthedocs.org/projects/cgap-portal/badge/?version=latest
 
@@ -27,3 +15,47 @@ Welcome to CGAP! We are a team of scientists, clinicians, and developers who aim
 * `cgapwolf `_ for workflow development
 
 Be warned that features are under active development and may not be stable! Visit the production deployment for the best experience. For installation and more information on getting started, see our `documentation page `_.
+
+Note that at this time, CGAP is operating in a hybrid model where some environments are deployed to AWS Elastic Beanstalk and others are deployed to AWS Elastic Container Service (ECS). The Beanstalk deployments are referred to as "legacy deployments" and the ECS deployments are referred to as "alpha deployments".
+
+For information on how to run CGAP with Docker, see `here <./docs/source/docker-local.rst>`_.
+
+For information on CGAP-Docker in production, see `here <./docs/source/docker-production.rst>`_.
+
+Navigating this Repository
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Important directories/files are outlined below.
+
+ * ``.github/workflows/`` contains Github Action Workflows
+ * ``.ebextensions/`` contains the Elastic Beanstalk provisioning scripts
+ * ``bin/`` contains the few remaining executables
+ * ``deploy/docker`` contains containerization related scripts/configuration
+ * ``docs/`` contains documentation
+ * ``parts/`` contains WSGI entry points for the Beanstalk setup
+ * ``scripts/`` contains misc scripts
+ * ``src/encoded/`` where the code is
+ * ``.dockerignore`` specifies paths ignored by the Dockerfile
+ * ``Dockerfile`` contains the Docker build instructions for the cgap-portal - see ``docker-production.rst``
+ * ``Makefile`` contains macros for common build operations - see ``make info``
+ * ``docker-compose.yml`` builds the new local deployment - see ``docker-local.rst``
+ * ``package.json`` and ``package-lock.json`` specify the front-end dependencies
+ * ``pyproject.toml`` and ``poetry.lock`` specify the back-end dependencies
+ * ``setup_eb.py`` performs final installation setup
+
+Navigating src/encoded/
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Top level files are modules that make up the core functionality of the back-end. Some modules differ greatly from or do
+not even exist in fourfront. Directories are outlined below.
+
+ * ``annotations/`` contains mapping table and ingestion related metadata
+ * ``commands/`` contains Python commands that can be run on the system from the command line
+ * ``docs/`` contains ReadTheDocs documentation
+ * ``ingestion/`` contains ingestion related code, such as mapping table intake and VCF processing
+ * ``schemas/`` contains the metadata schemas
+ * ``search/`` contains the search/filter_set APIs
+ * ``static/`` contains front-end code
+ * ``tests/`` contains back-end unit tests and insert data
+ * ``types/`` contains metadata type definitions
+ * ``upgrade/`` contains collection schema version upgraders, which are not currently functioning as intended
diff --git a/deploy/deploy_beanstalk.py b/deploy/deploy_beanstalk.py
deleted file mode 100644
index 949b75b038..0000000000
--- a/deploy/deploy_beanstalk.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import os
-from time import sleep
-import sys
-import subprocess
-import hashlib
-import argparse
-from datetime import datetime
-
-
-def tag(name):
-    subprocess.check_output(['git', 'tag', name, '-m', 'version created for staging deploy'])
-    subprocess.check_output(['git', 'push', 'origin-travis', name])
-
-
-def merge(source, merge_to):
-    res1 = subprocess.check_output(['git', 'status']).decode('utf-8').strip()
-
-    print("status on master is: " + res1)
-    subprocess.check_output(['git', 'stash'])
-
-    subprocess.check_output(
-        ['git', 'checkout', merge_to])
-
-    res = subprocess.check_output(['git', 'status']).decode('utf-8').strip()
-    print("status on prod is: " + res)
-
-    res2 = subprocess.check_output(
-        ['git', 'merge', source, '-m', 'merged']).decode('utf-8').strip()
-    print(res2)
-    subprocess.check_output(
-        ['git', 'push', 'origin-travis', merge_to]).decode('utf-8').strip()
-    subprocess.check_output(['git', 'stash', 'pop'])
-
-
-def get_git_version():
-    version = os.environ.get("TRAVIS_COMMIT", "")[:7]
-    if not version:
-        version = subprocess.check_output(
-            ['git', '-C', os.path.dirname(__file__), 'describe']).decode('utf-8').strip()
-        version = version[:7]
-    diff = subprocess.check_output(
-        ['git', '-C', os.path.dirname(__file__), 'diff', '--no-ext-diff'])
-    if diff:
-        version += '-patch' + hashlib.sha1(diff).hexdigest()[:7]
-    return "v-" + version
-
-
-def update_version(version, branch):
-    filename = 'buildout.cfg'
-    regex = 's/encoded_version.*/encoded_version = %s/' % (version)
-
-    print("updated buildout.cfg with version", version)
-    subprocess.check_output(
-        ['sed', '-i', regex, filename])
-    commit_with_previous_msg(filename, branch)
-
-
-def commit_with_previous_msg(filename, branch):
-    print("adding file to git")
-    subprocess.check_output(
-        ['git', 'add', filename])
-
-    msg = parse(previous_git_commit())
-
-    print("git commit -m " + msg)
-    subprocess.check_output(
-        ['git', 'commit', '-m', 'version bump + ' + msg])
-
-    subprocess.check_output(
-        ['git', 'push', 'origin-travis', branch])
-
-
-def previous_git_commit():
-    return subprocess.check_output(
-        ['git', 'log', '-1']
-    ).decode('utf-8').strip()
-
-
-def parse(commit):
-    author, msg = "", ""
-    # parse up some commit lines
-    commit_lines = commit.split('\n')
-    author = commit_lines[1].split(":")[1].strip()
-    msg = " ".join(l.strip() for l in commit_lines[3:] if l)
-
-    return "%s - %s" % (author, msg)
-
-
-def deploy(deploy_to=None):
-    '''
-    run eb deploy and show the output
-    '''
-    print("start deployment to elastic beanstalk deploy to is %s" % str(deploy_to))
-
-    wait = [20, 40, 60, 120, 120, 120, 120]
-    for time in wait:
-        try:
-            if not deploy_to:
-                p = 
subprocess.Popen(['eb', 'deploy'], stderr=subprocess.STDOUT, stdout=subprocess.PIPE) - else: - p = subprocess.Popen(['eb', 'deploy', deploy_to], stderr=subprocess.STDOUT, stdout=subprocess.PIPE) - except Exception: - # we often get errors due to timeouts - sleep(time) - else: - break - - time_started = datetime.now() - print('Started deployment at {}. Waiting 2 minutes & exiting.'.format(time_started.strftime('%H:%M:%S:%f'))) - sleep(120) - - # MAYBE TODO: Setup new thread and listen re: "Deploying new version to instance(s).". Exit if this occurs before 2min. - # - #while True: - # out = p.stdout.readline() - # out = out.decode('utf-8') - # curr_time = datetime.now() - # if out != '': - # sys.stdout.write('[' + curr_time.strftime('%H:%M:%S:%f') + '] ' + out) - # sys.stdout.flush() - # if ("Deploying new version to instance(s)." in out) or (time_started + timedelta(minutes=2) <= curr_time): # 2 min time limit - # print('Killing sub-process & exiting.') - # sleep(5) - # p.kill() - # break - # if out == '' and p.poll() is not None: - # print('Deploy sub-process complete. Exiting.') - # break - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="update version if relevant and deploy" - ) - args = parser.parse_args() - branch = os.environ.get("TRAVIS_BRANCH") - merge_to = os.environ.get("tibanna_merge", "").strip() - deploy_to = os.environ.get("tibanna_deploy", "").strip() - - # Ref: https://hms-dbmi.atlassian.net/browse/C4-114 - # Will and I believe this code to be stale and no longer needed. - # It's retained temporarily because it illustrates some interesting tool use - # I might want to borrow later for other purposes. -kmp 8-Apr-2020 - # - # try: - # if deploy_to in ['fourfront-staging', 'fourfront-webprod', 'fourfront-webprod2']: - # print("deploy to staging") - # ver = get_git_version() - # # checkout correct branch - # print("checkout master") - # subprocess.check_output( - # ['git', 'checkout', branch]) - # - # print("update version") - # update_version(ver, branch) - # if merge_to: - # print("merge from %s to %s" % (branch, merge_to)) - # merge(branch, merge_to) - # print("tag it") - # tag(ver) - # except Exception as e: - # # this can all go wrong if somebody pushes during the build - # # or what not, in which case we just won't update the tag / merge - # print("got the following expection but we will ignore it") - # print(e) - # print("switching back to source branch") - # subprocess.check_output( - # ['git', 'checkout', branch]) - - print("now let's deploy") - deploy(deploy_to) diff --git a/deploy/docker/elasticsearch/Dockerfile b/deploy/docker/elasticsearch/Dockerfile new file mode 100644 index 0000000000..826ba8b72a --- /dev/null +++ b/deploy/docker/elasticsearch/Dockerfile @@ -0,0 +1,7 @@ +FROM docker.elastic.co/elasticsearch/elasticsearch:6.8.14 + +MAINTAINER William Ronchetti "william_ronchetti@hms.harvard.edu" + +ENV ELASTICSEARCH_VERSION="6.8.14" +ENV ELASTICSEARCH_SERVICE_PORT=9200 +EXPOSE $ELASTICSEARCH_SERVICE_PORT diff --git a/deploy/docker/local/docker_development.ini b/deploy/docker/local/docker_development.ini new file mode 100644 index 0000000000..377a3b6e9b --- /dev/null +++ b/deploy/docker/local/docker_development.ini @@ -0,0 +1,97 @@ +### +# Docker App Configuration for local deployment +# http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html +# NOTE: Still needs to be customized for (automated use by) multiple users +### + +[app:app] +use = config:base.ini#app +session.secret = %(here)s/session-secret.b64 
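+# (session-secret.b64 is generated at image build time -- the "Misc" step in the
+# Dockerfile writes 256 random bytes, base64-encoded, to that file)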
+env.name = cgap-docker-will-test +sqlalchemy.url = postgres://postgres:postgres@db:5432/postgres +elasticsearch.server = search-cgap-testing-6-8-vo4mdkmkshvmyddc65ux7dtaou.us-east-1.es.amazonaws.com:443 +elasticsearch.aws_auth = true +blob_bucket = encoded-4dn-blobs +metadata_bundles_bucket = elasticbeanstalk-fourfront-cgaplocal-dev-metadata-bundles +load_test_only = true +create_tables = true +testing = true +postgresql.statement_timeout = 20 +mpindexer = true +indexer = true +indexer.namespace = cgap-docker-will-test +pyramid.reload_templates = true +pyramid.debug_authorization = false +pyramid.debug_notfound = false +pyramid.debug_routematch = false +pyramid.default_locale_name = en +# this line determines which load function is used in load_data +# most deployments use: "load_test_data = encoded.loadxl:load_test_data" +load_test_data = encoded.loadxl:load_prod_data +encoded_version = 100.200.300 +snovault_version = 200.300.400 +utils_version = 300.400.500 +eb_app_version = app-v-development-simulation + +[pipeline:debug] +pipeline = + egg:PasteDeploy#prefix + egg:repoze.debug#pdbpm + app +set pyramid.includes = + pyramid_translogger + +[composite:main] +use = egg:rutter#urlmap +/ = debug +/_indexer = indexer + +[composite:indexer] +use = config:base.ini#indexer + +### +# wsgi server configuration +### + +[server:main] +use = egg:waitress#main +host = 0.0.0.0 +port = 6543 +threads = 1 + +### +# logging configuration +# http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html +### + +[loggers] +keys = root, wsgi, encoded + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = INFO +handlers = console + +[logger_wsgi] +level = DEBUG +handlers = +qualname = wsgi + +[logger_encoded] +level = DEBUG +handlers = +qualname = encoded + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s diff --git a/deploy/docker/local/entrypoint.sh b/deploy/docker/local/entrypoint.sh new file mode 100644 index 0000000000..9977741427 --- /dev/null +++ b/deploy/docker/local/entrypoint.sh @@ -0,0 +1,30 @@ +#!/bin/sh + +if [ -z ${TEST+x} ]; then + + if [ ! 
-z ${LOAD+x} ]; then + + # Clear db/es since this is the local entry point + poetry run clear-db-es-contents development.ini --app-name app --env "$CGAP_ENV_NAME" + + # Create mapping + poetry run create-mapping-on-deploy development.ini --app-name app + + # Load Data (based on development.ini, for now just master-inserts) + poetry run load-data development.ini --app-name app --prod + + fi + + # Start nginx proxy + service nginx start + + # Start application + make deploy2 + +else + + echo "Not starting serving application" + echo "Enter the container with docker exec" + sleep 100000000 + +fi diff --git a/deploy/docker/local/install_nginx.sh b/deploy/docker/local/install_nginx.sh new file mode 100644 index 0000000000..98e82d6187 --- /dev/null +++ b/deploy/docker/local/install_nginx.sh @@ -0,0 +1,77 @@ +# Copied from: https://github.com/nginxinc/docker-nginx/blob/594ce7a8bc26c85af88495ac94d5cd0096b306f7/mainline/buster/Dockerfile + +# Standard set up Nginx +export NGINX_VERSION=1.17.10 +export NJS_VERSION=0.3.9 +export PKG_RELEASE=1~buster + +# Securely provisions deps, installs Nginx, then removes all artifacts +set -x \ + && apt-get update \ + && apt-get install --no-install-recommends --no-install-suggests -y gnupg1 ca-certificates \ + && \ + NGINX_GPGKEY=573BFD6B3D8FBC641079A6ABABF5BD827BD9BF62; \ + found=''; \ + for server in \ + ha.pool.sks-keyservers.net \ + hkp://keyserver.ubuntu.com:80 \ + hkp://p80.pool.sks-keyservers.net:80 \ + pgp.mit.edu \ + ; do \ + echo "Fetching GPG key $NGINX_GPGKEY from $server"; \ + apt-key adv --keyserver "$server" --keyserver-options timeout=10 --recv-keys "$NGINX_GPGKEY" && found=yes && break; \ + done; \ + test -z "$found" && echo >&2 "error: failed to fetch GPG key $NGINX_GPGKEY" && exit 1; \ + apt-get remove --purge --auto-remove -y gnupg1 && rm -rf /var/lib/apt/lists/* \ + && dpkgArch="$(dpkg --print-architecture)" \ + && nginxPackages=" \ + nginx=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-xslt=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-geoip=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-image-filter=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-njs=${NGINX_VERSION}.${NJS_VERSION}-${PKG_RELEASE} \ + " \ + && case "$dpkgArch" in \ + amd64|i386) \ + echo "deb https://nginx.org/packages/mainline/debian/ buster nginx" >> /etc/apt/sources.list.d/nginx.list \ + && apt-get update \ + ;; \ + *) \ + echo "deb-src https://nginx.org/packages/mainline/debian/ buster nginx" >> /etc/apt/sources.list.d/nginx.list \ + \ + && tempDir="$(mktemp -d)" \ + && chmod 777 "$tempDir" \ + \ + && savedAptMark="$(apt-mark showmanual)" \ + \ + && apt-get update \ + && apt-get build-dep -y $nginxPackages \ + && ( \ + cd "$tempDir" \ + && DEB_BUILD_OPTIONS="nocheck parallel=$(nproc)" \ + apt-get source --compile $nginxPackages \ + ) \ + \ + && apt-mark showmanual | xargs apt-mark auto > /dev/null \ + && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \ + \ + && ls -lAFh "$tempDir" \ + && ( cd "$tempDir" && dpkg-scanpackages . 
> Packages ) \ + && grep '^Package: ' "$tempDir/Packages" \ + && echo "deb [ trusted=yes ] file://$tempDir ./" > /etc/apt/sources.list.d/temp.list \ + && apt-get -o Acquire::GzipIndexes=false update \ + ;; \ + esac \ + \ + && apt-get install --no-install-recommends --no-install-suggests -y \ + $nginxPackages \ + gettext-base \ + && apt-get remove --purge --auto-remove -y ca-certificates && rm -rf /var/lib/apt/lists/* /etc/apt/sources.list.d/nginx.list \ + \ + && if [ -n "$tempDir" ]; then \ + apt-get purge -y --auto-remove \ + && rm -rf "$tempDir" /etc/apt/sources.list.d/temp.list; \ + fi + +# forward request and error logs to docker log collector +ln -sf /dev/stdout /var/log/nginx/access.log && ln -sf /dev/stderr /var/log/nginx/error.log diff --git a/deploy/docker/local/nginx.conf b/deploy/docker/local/nginx.conf new file mode 100644 index 0000000000..4921e4fdb9 --- /dev/null +++ b/deploy/docker/local/nginx.conf @@ -0,0 +1,41 @@ +# Minimal nginx proxy for development + +error_log /var/log/nginx/error.log warn; + +events { + worker_connections 2048; +} +http { + resolver 8.8.8.8; + upstream app { + server 0.0.0.0:6543; + keepalive 10; + } + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + server { + listen 8000; + access_log /var/log/nginx/access.log main; + location / { + # Normalize duplicate slashes + if ($request ~ ^(GET|HEAD)\s([^?]*)//(.*)\sHTTP/[0-9.]+$) { + return 301 $2/$3; + } + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_pass http://app; + proxy_set_header Connection ""; + } + location ~ ^/_proxy/(.*)$ { + internal; + proxy_set_header Authorization ""; + proxy_set_header Content-Type ""; + proxy_buffering off; + proxy_pass $1$is_args$args; + } + } +} diff --git a/deploy/docker/postgres/Dockerfile b/deploy/docker/postgres/Dockerfile new file mode 100644 index 0000000000..f096e6b7ac --- /dev/null +++ b/deploy/docker/postgres/Dockerfile @@ -0,0 +1,15 @@ +# TODO: upgrade to latest version we can tolerate +FROM postgres:12.3 + +MAINTAINER William Ronchetti "william_ronchetti@hms.harvard.edu" + +# Install some system level dependencies +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates htop vim emacs curl \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Copy over our custom conf, enable inbound connections +COPY postgresql.conf /etc/postgresql/postgresql.conf +RUN echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/pg_hba.conf + +ENV PGDATA=/var/lib/postgresql/data/pgdata diff --git a/deploy/docker/postgres/postgresql.conf b/deploy/docker/postgres/postgresql.conf new file mode 100644 index 0000000000..b01d14fc41 --- /dev/null +++ b/deploy/docker/postgres/postgresql.conf @@ -0,0 +1,642 @@ +# ----------------------------- +# PostgreSQL configuration file +# ----------------------------- +# +# This file consists of lines of the form: +# +# name = value +# +# (The "=" is optional.) Whitespace may be used. Comments are introduced with +# "#" anywhere on a line. The complete list of parameter names and allowed +# values can be found in the PostgreSQL documentation. +# +# The commented-out settings shown in this file represent the default values. +# Re-commenting a setting is NOT sufficient to revert it to the default value; +# you need to reload the server. 
+# +# This file is read on server startup and when the server receives a SIGHUP +# signal. If you edit the file on a running system, you have to SIGHUP the +# server for the changes to take effect, or use "pg_ctl reload". Some +# parameters, which are marked below, require a server shutdown and restart to +# take effect. +# +# Any parameter can also be given as a command-line option to the server, e.g., +# "postgres -c log_connections=on". Some parameters can be changed at run time +# with the "SET" SQL command. +# +# Memory units: kB = kilobytes Time units: ms = milliseconds +# MB = megabytes s = seconds +# GB = gigabytes min = minutes +# TB = terabytes h = hours +# d = days + + +#------------------------------------------------------------------------------ +# FILE LOCATIONS +#------------------------------------------------------------------------------ + +# The default values of these variables are driven from the -D command-line +# option or PGDATA environment variable, represented here as ConfigDir. + +#data_directory = 'ConfigDir' # use data in another directory +# (change requires restart) +#hba_file = 'ConfigDir/pg_hba.conf' # host-based authentication file +# (change requires restart) +#ident_file = 'ConfigDir/pg_ident.conf' # ident configuration file +# (change requires restart) + +# If external_pid_file is not explicitly set, no extra PID file is written. +#external_pid_file = '' # write an extra PID file +# (change requires restart) + + +#------------------------------------------------------------------------------ +# CONNECTIONS AND AUTHENTICATION +#------------------------------------------------------------------------------ + +# - Connection Settings - + +# Only necessary change from defaults +listen_addresses = '*' +# comma-separated list of addresses; +# defaults to 'localhost'; use '*' for all +# (change requires restart) +#port = 5432 # (change requires restart) +#max_connections = 100 # (change requires restart) +#superuser_reserved_connections = 3 # (change requires restart) +#unix_socket_directories = '/tmp' # comma-separated list of directories +# (change requires restart) +#unix_socket_group = '' # (change requires restart) +#unix_socket_permissions = 0777 # begin with 0 to use octal notation +# (change requires restart) +#bonjour = off # advertise server via Bonjour +# (change requires restart) +#bonjour_name = '' # defaults to the computer name +# (change requires restart) + +# - Security and Authentication - + +#authentication_timeout = 1min # 1s-600s +#ssl = off # (change requires restart) +#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers +# (change requires restart) +#ssl_prefer_server_ciphers = on # (change requires restart) +#ssl_ecdh_curve = 'prime256v1' # (change requires restart) +#ssl_cert_file = 'server.crt' # (change requires restart) +#ssl_key_file = 'server.key' # (change requires restart) +#ssl_ca_file = '' # (change requires restart) +#ssl_crl_file = '' # (change requires restart) +#password_encryption = on +#db_user_namespace = off +#row_security = on + +# GSSAPI using Kerberos +#krb_server_keyfile = '' +#krb_caseins_users = off + +# - TCP Keepalives - +# see "man 7 tcp" for details + +#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds; +# 0 selects the system default +#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds; +# 0 selects the system default +#tcp_keepalives_count = 0 # TCP_KEEPCNT; +# 0 selects the system default + + +#------------------------------------------------------------------------------ +# RESOURCE 
USAGE (except WAL) +#------------------------------------------------------------------------------ + +# - Memory - + +#shared_buffers = 32MB # min 128kB +# (change requires restart) +#huge_pages = try # on, off, or try +# (change requires restart) +#temp_buffers = 8MB # min 800kB +#max_prepared_transactions = 0 # zero disables the feature +# (change requires restart) +# Caution: it is not advisable to set max_prepared_transactions nonzero unless +# you actively intend to use prepared transactions. +#work_mem = 4MB # min 64kB +#maintenance_work_mem = 64MB # min 1MB +#replacement_sort_tuples = 150000 # limits use of replacement selection sort +#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem +#max_stack_depth = 2MB # min 100kB +#dynamic_shared_memory_type = posix # the default is the first option +# supported by the operating system: +# posix +# sysv +# windows +# mmap +# use none to disable dynamic shared memory + +# - Disk - + +#temp_file_limit = -1 # limits per-process temp file space +# in kB, or -1 for no limit + +# - Kernel Resource Usage - + +#max_files_per_process = 1000 # min 25 +# (change requires restart) +#shared_preload_libraries = '' # (change requires restart) + +# - Cost-Based Vacuum Delay - + +#vacuum_cost_delay = 0 # 0-100 milliseconds +#vacuum_cost_page_hit = 1 # 0-10000 credits +#vacuum_cost_page_miss = 10 # 0-10000 credits +#vacuum_cost_page_dirty = 20 # 0-10000 credits +#vacuum_cost_limit = 200 # 1-10000 credits + +# - Background Writer - + +#bgwriter_delay = 200ms # 10-10000ms between rounds +#bgwriter_lru_maxpages = 100 # 0-1000 max buffers written/round +#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round +#bgwriter_flush_after = 0 # measured in pages, 0 disables + +# - Asynchronous Behavior - + +#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching +#max_worker_processes = 8 # (change requires restart) +#max_parallel_workers_per_gather = 0 # taken from max_worker_processes +#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate +# (change requires restart) +#backend_flush_after = 0 # measured in pages, 0 disables + + +#------------------------------------------------------------------------------ +# WRITE AHEAD LOG +#------------------------------------------------------------------------------ + +# - Settings - + +#wal_level = minimal # minimal, replica, or logical +# (change requires restart) +#fsync = on # flush data to disk for crash safety +# (turning this off can cause +# unrecoverable data corruption) +#synchronous_commit = on # synchronization level; +# off, local, remote_write, remote_apply, or on +#wal_sync_method = fsync # the default is the first option +# supported by the operating system: +# open_datasync +# fdatasync (default on Linux) +# fsync +# fsync_writethrough +# open_sync +#full_page_writes = on # recover from partial page writes +#wal_compression = off # enable compression of full-page writes +#wal_log_hints = off # also do full page writes of non-critical updates +# (change requires restart) +#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers +# (change requires restart) +#wal_writer_delay = 200ms # 1-10000 milliseconds +#wal_writer_flush_after = 1MB # measured in pages, 0 disables + +#commit_delay = 0 # range 0-100000, in microseconds +#commit_siblings = 5 # range 1-1000 + +# - Checkpoints - + +#checkpoint_timeout = 5min # range 30s-1d +#max_wal_size = 1GB +#min_wal_size = 80MB +#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0 
+#checkpoint_flush_after = 0 # measured in pages, 0 disables +#checkpoint_warning = 30s # 0 disables + +# - Archiving - + +#archive_mode = off # enables archiving; off, on, or always +# (change requires restart) +#archive_command = '' # command to use to archive a logfile segment +# placeholders: %p = path of file to archive +# %f = file name only +# e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f' +#archive_timeout = 0 # force a logfile segment switch after this +# number of seconds; 0 disables + + +#------------------------------------------------------------------------------ +# REPLICATION +#------------------------------------------------------------------------------ + +# - Sending Server(s) - + +# Set these on the master and on any standby that will send replication data. + +#max_wal_senders = 0 # max number of walsender processes +# (change requires restart) +#wal_keep_segments = 0 # in logfile segments, 16MB each; 0 disables +#wal_sender_timeout = 60s # in milliseconds; 0 disables + +#max_replication_slots = 0 # max number of replication slots +# (change requires restart) +#track_commit_timestamp = off # collect timestamp of transaction commit +# (change requires restart) + +# - Master Server - + +# These settings are ignored on a standby server. + +#synchronous_standby_names = '' # standby servers that provide sync rep +# number of sync standbys and comma-separated list of application_name +# from standby(s); '*' = all +#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed + +# - Standby Servers - + +# These settings are ignored on a master server. + +#hot_standby = off # "on" allows queries during recovery +# (change requires restart) +#max_standby_archive_delay = 30s # max delay before canceling queries +# when reading WAL from archive; +# -1 allows indefinite delay +#max_standby_streaming_delay = 30s # max delay before canceling queries +# when reading streaming WAL; +# -1 allows indefinite delay +#wal_receiver_status_interval = 10s # send replies at least this often +# 0 disables +#hot_standby_feedback = off # send info from standby to prevent +# query conflicts +#wal_receiver_timeout = 60s # time that receiver waits for +# communication from master +# in milliseconds; 0 disables +#wal_retrieve_retry_interval = 5s # time to wait before retrying to +# retrieve WAL after a failed attempt + + +#------------------------------------------------------------------------------ +# QUERY TUNING +#------------------------------------------------------------------------------ + +# - Planner Method Configuration - + +#enable_bitmapscan = on +#enable_hashagg = on +#enable_hashjoin = on +#enable_indexscan = on +#enable_indexonlyscan = on +#enable_material = on +#enable_mergejoin = on +#enable_nestloop = on +#enable_seqscan = on +#enable_sort = on +#enable_tidscan = on + +# - Planner Cost Constants - + +#seq_page_cost = 1.0 # measured on an arbitrary scale +#random_page_cost = 4.0 # same scale as above +#cpu_tuple_cost = 0.01 # same scale as above +#cpu_index_tuple_cost = 0.005 # same scale as above +#cpu_operator_cost = 0.0025 # same scale as above +#parallel_tuple_cost = 0.1 # same scale as above +#parallel_setup_cost = 1000.0 # same scale as above +#min_parallel_relation_size = 8MB +#effective_cache_size = 4GB + +# - Genetic Query Optimizer - + +#geqo = on +#geqo_threshold = 12 +#geqo_effort = 5 # range 1-10 +#geqo_pool_size = 0 # selects default based on effort +#geqo_generations = 0 # selects default based on effort 
+#geqo_selection_bias = 2.0 # range 1.5-2.0 +#geqo_seed = 0.0 # range 0.0-1.0 + +# - Other Planner Options - + +#default_statistics_target = 100 # range 1-10000 +#constraint_exclusion = partition # on, off, or partition +#cursor_tuple_fraction = 0.1 # range 0.0-1.0 +#from_collapse_limit = 8 +#join_collapse_limit = 8 # 1 disables collapsing of explicit +# JOIN clauses +#force_parallel_mode = off + + +#------------------------------------------------------------------------------ +# ERROR REPORTING AND LOGGING +#------------------------------------------------------------------------------ + +# - Where to Log - + +#log_destination = 'stderr' # Valid values are combinations of +# stderr, csvlog, syslog, and eventlog, +# depending on platform. csvlog +# requires logging_collector to be on. + +# This is used when logging to stderr: +#logging_collector = off # Enable capturing of stderr and csvlog +# into log files. Required to be on for +# csvlogs. +# (change requires restart) + +# These are only used if logging_collector is on: +#log_directory = 'pg_log' # directory where log files are written, +# can be absolute or relative to PGDATA +#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern, +# can include strftime() escapes +#log_file_mode = 0600 # creation mode for log files, +# begin with 0 to use octal notation +#log_truncate_on_rotation = off # If on, an existing log file with the +# same name as the new log file will be +# truncated rather than appended to. +# But such truncation only occurs on +# time-driven rotation, not on restarts +# or size-driven rotation. Default is +# off, meaning append to existing files +# in all cases. +#log_rotation_age = 1d # Automatic rotation of logfiles will +# happen after that time. 0 disables. +#log_rotation_size = 10MB # Automatic rotation of logfiles will +# happen after that much log output. +# 0 disables. 
+ +# These are relevant when logging to syslog: +#syslog_facility = 'LOCAL0' +#syslog_ident = 'postgres' +#syslog_sequence_numbers = on +#syslog_split_messages = on + +# This is only relevant when logging to eventlog (win32): +#event_source = 'PostgreSQL' + +# - When to Log - + +#client_min_messages = notice # values in order of decreasing detail: +# debug5 +# debug4 +# debug3 +# debug2 +# debug1 +# log +# notice +# warning +# error + +#log_min_messages = warning # values in order of decreasing detail: +# debug5 +# debug4 +# debug3 +# debug2 +# debug1 +# info +# notice +# warning +# error +# log +# fatal +# panic + +#log_min_error_statement = error # values in order of decreasing detail: +# debug5 +# debug4 +# debug3 +# debug2 +# debug1 +# info +# notice +# warning +# error +# log +# fatal +# panic (effectively off) + +#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements +# and their durations, > 0 logs only +# statements running at least this number +# of milliseconds + + +# - What to Log - + +#debug_print_parse = off +#debug_print_rewritten = off +#debug_print_plan = off +#debug_pretty_print = on +#log_checkpoints = off +#log_connections = off +#log_disconnections = off +#log_duration = off +#log_error_verbosity = default # terse, default, or verbose messages +#log_hostname = off +#log_line_prefix = '' # special values: +# %a = application name +# %u = user name +# %d = database name +# %r = remote host and port +# %h = remote host +# %p = process ID +# %t = timestamp without milliseconds +# %m = timestamp with milliseconds +# %n = timestamp with milliseconds (as a Unix epoch) +# %i = command tag +# %e = SQL state +# %c = session ID +# %l = session line number +# %s = session start timestamp +# %v = virtual transaction ID +# %x = transaction ID (0 if none) +# %q = stop here in non-session +# processes +# %% = '%' +# e.g. '<%u%%%d> ' +#log_lock_waits = off # log lock waits >= deadlock_timeout +#log_statement = 'none' # none, ddl, mod, all +#log_replication_commands = off +#log_temp_files = -1 # log temporary files equal or larger +# than the specified size in kilobytes; +# -1 disables, 0 logs all temp files +#log_timezone = 'GMT' + + +# - Process Title - + +#cluster_name = '' # added to process titles if nonempty +# (change requires restart) +#update_process_title = on + + +#------------------------------------------------------------------------------ +# RUNTIME STATISTICS +#------------------------------------------------------------------------------ + +# - Query/Index Statistics Collector - + +#track_activities = on +#track_counts = on +#track_io_timing = off +#track_functions = none # none, pl, all +#track_activity_query_size = 1024 # (change requires restart) +#stats_temp_directory = 'pg_stat_tmp' + + +# - Statistics Monitoring - + +#log_parser_stats = off +#log_planner_stats = off +#log_executor_stats = off +#log_statement_stats = off + + +#------------------------------------------------------------------------------ +# AUTOVACUUM PARAMETERS +#------------------------------------------------------------------------------ + +#autovacuum = on # Enable autovacuum subprocess? 'on' +# requires track_counts to also be on. +#log_autovacuum_min_duration = -1 # -1 disables, 0 logs all actions and +# their durations, > 0 logs only +# actions running at least this number +# of milliseconds. 
+#autovacuum_max_workers = 3 # max number of autovacuum subprocesses +# (change requires restart) +#autovacuum_naptime = 1min # time between autovacuum runs +#autovacuum_vacuum_threshold = 50 # min number of row updates before +# vacuum +#autovacuum_analyze_threshold = 50 # min number of row updates before +# analyze +#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum +#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze +#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum +# (change requires restart) +#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age +# before forced vacuum +# (change requires restart) +#autovacuum_vacuum_cost_delay = 20ms # default vacuum cost delay for +# autovacuum, in milliseconds; +# -1 means use vacuum_cost_delay +#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for +# autovacuum, -1 means use +# vacuum_cost_limit + + +#------------------------------------------------------------------------------ +# CLIENT CONNECTION DEFAULTS +#------------------------------------------------------------------------------ + +# - Statement Behavior - + +#search_path = '"$user", public' # schema names +#default_tablespace = '' # a tablespace name, '' uses the default +#temp_tablespaces = '' # a list of tablespace names, '' uses +# only default tablespace +#check_function_bodies = on +#default_transaction_isolation = 'read committed' +#default_transaction_read_only = off +#default_transaction_deferrable = off +#session_replication_role = 'origin' +#statement_timeout = 0 # in milliseconds, 0 is disabled +#lock_timeout = 0 # in milliseconds, 0 is disabled +#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled +#vacuum_freeze_min_age = 50000000 +#vacuum_freeze_table_age = 150000000 +#vacuum_multixact_freeze_min_age = 5000000 +#vacuum_multixact_freeze_table_age = 150000000 +#bytea_output = 'hex' # hex, escape +#xmlbinary = 'base64' +#xmloption = 'content' +#gin_fuzzy_search_limit = 0 +#gin_pending_list_limit = 4MB + +# - Locale and Formatting - + +#datestyle = 'iso, mdy' +#intervalstyle = 'postgres' +#timezone = 'GMT' +#timezone_abbreviations = 'Default' # Select the set of available time zone +# abbreviations. Currently, there are +# Default +# Australia (historical usage) +# India +# You can create your own file in +# share/timezonesets/. +#extra_float_digits = 0 # min -15, max 3 +#client_encoding = sql_ascii # actually, defaults to database +# encoding + +# These settings are initialized by initdb, but they can be changed. 
+#lc_messages = 'C'			# locale for system error message
+					# strings
+#lc_monetary = 'C'			# locale for monetary formatting
+#lc_numeric = 'C'			# locale for number formatting
+#lc_time = 'C'				# locale for time formatting
+
+# default configuration for text search
+#default_text_search_config = 'pg_catalog.simple'
+
+# - Other Defaults -
+
+#dynamic_library_path = '$libdir'
+#local_preload_libraries = ''
+#session_preload_libraries = ''
+
+
+#------------------------------------------------------------------------------
+# LOCK MANAGEMENT
+#------------------------------------------------------------------------------
+
+#deadlock_timeout = 1s
+#max_locks_per_transaction = 64		# min 10
+					# (change requires restart)
+#max_pred_locks_per_transaction = 64	# min 10
+					# (change requires restart)
+
+
+#------------------------------------------------------------------------------
+# VERSION/PLATFORM COMPATIBILITY
+#------------------------------------------------------------------------------
+
+# - Previous PostgreSQL Versions -
+
+#array_nulls = on
+#backslash_quote = safe_encoding	# on, off, or safe_encoding
+#default_with_oids = off
+#escape_string_warning = on
+#lo_compat_privileges = off
+#operator_precedence_warning = off
+#quote_all_identifiers = off
+#sql_inheritance = on
+#standard_conforming_strings = on
+#synchronize_seqscans = on
+
+# - Other Platforms and Clients -
+
+#transform_null_equals = off
+
+
+#------------------------------------------------------------------------------
+# ERROR HANDLING
+#------------------------------------------------------------------------------
+
+#exit_on_error = off			# terminate session on any error?
+#restart_after_crash = on		# reinitialize after backend crash?
+
+
+#------------------------------------------------------------------------------
+# CONFIG FILE INCLUDES
+#------------------------------------------------------------------------------
+
+# These options allow settings to be loaded from files other than the
+# default postgresql.conf.
+
+#include_dir = 'conf.d'			# include files ending in '.conf' from
+					# directory 'conf.d'
+#include_if_exists = 'exists.conf'	# include file only if it exists
+#include = 'special.conf'		# include file
+
+
+#------------------------------------------------------------------------------
+# CUSTOMIZED OPTIONS
+#------------------------------------------------------------------------------
+
+# Add settings for extensions here
\ No newline at end of file
diff --git a/deploy/docker/production/assume_identity.py b/deploy/docker/production/assume_identity.py
new file mode 100644
index 0000000000..e43ebd992e
--- /dev/null
+++ b/deploy/docker/production/assume_identity.py
@@ -0,0 +1,45 @@
+# This code snippet is adapted from AWS ECS Documentation
+# The plan is to 'create' environments by uploading Secrets to AWS Secrets Manager
+# and retrieve the secrets upon deployment.
+# The idea is the container runner will have an assumed IAM role granting access
+# specifically to the secret
+
+import os
+import logging
+from dcicutils.qa_utils import override_environ
+from dcicutils.deployment_utils import IniFileManager
+from dcicutils.secrets_utils import assume_identity
+
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__file__)
+_MY_DIR = os.path.dirname(__file__)
+
+
+class CGAPDockerIniFileManager(IniFileManager):
+    """ This runs at top level, so path is slightly different. 
""" + # should work but doesn't (missing cgap-portal): os.path.join(os.path.dirname(_MY_DIR), "pyproject.toml") + # expected = + # actual = + # assert actual == expected, "The actual value %s was not what we expected, %s." % (actual, expected) + TEMPLATE_DIR = '/home/nginx/cgap-portal/deploy/ini_files' + PYPROJECT_FILE_NAME = '/home/nginx/cgap-portal/pyproject.toml' + + +def build_production_ini_from_global_application_configuration(): + """ This function makes a request to secrets manager for the identity passed to the container. + See documentation on API in dcicutils. + """ + identity = assume_identity() + + # build production.ini + with override_environ(**identity): + + CGAPDockerIniFileManager.build_ini_file_from_template( + '/home/nginx/cgap-portal/deploy/ini_files/cgap_any_alpha.ini', + '/home/nginx/cgap-portal/production.ini' + ) + + +if __name__ == '__main__': + build_production_ini_from_global_application_configuration() diff --git a/deploy/docker/production/cgap_any_alpha.ini b/deploy/docker/production/cgap_any_alpha.ini new file mode 100644 index 0000000000..4ca53b80f9 --- /dev/null +++ b/deploy/docker/production/cgap_any_alpha.ini @@ -0,0 +1,83 @@ +[app:app] +use = config:base.ini#app +session.secret = %(here)s/session-secret.b64 +file_upload_bucket = ${FILE_UPLOAD_BUCKET} +file_wfout_bucket = ${FILE_WFOUT_BUCKET} +blob_bucket = ${BLOB_BUCKET} +system_bucket = ${SYSTEM_BUCKET} +metadata_bundles_bucket = ${METADATA_BUNDLES_BUCKET} +sentry_dsn = ${SENTRY_DSN} +# blob_store_profile_name = encoded-4dn-files +accession_factory = encoded.server_defaults.enc_accession +elasticsearch.server = ${ES_SERVER} +snovault.app_version = ask-pip +env.name = ${BS_ENV} +mirror.env.name = ${BS_MIRROR_ENV} +encoded_version = ${PROJECT_VERSION} +eb_app_version = ${APP_VERSION} +snovault_version = ${SNOVAULT_VERSION} +utils_version = ${UTILS_VERSION} +mpindexer = false +indexer = ${INDEXER} +indexer.namespace = ${ES_NAMESPACE} +index_server = ${INDEX_SERVER} +elasticsearch.aws_auth = true +production = true +load_test_data = encoded.loadxl:load_${DATA_SET}_data +sqlalchemy.url = postgresql://${RDS_USERNAME}:${RDS_PASSWORD}@${RDS_HOSTNAME}:${RDS_PORT}/${RDS_DB_NAME} + +[composite:indexer] +use = config:base.ini#indexer + +[pipeline:main] +pipeline = + config:base.ini#memlimit + egg:PasteDeploy#prefix + app + +[pipeline:debug] +pipeline = + egg:repoze.debug#pdbpm + app +set pyramid.includes = + pyramid_translogger + +[server:main] +use = egg:waitress#main +host = 0.0.0.0 +port = 6543 +threads = 1 + +[loggers] +keys = root, encoded, encoded_listener + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console + +[logger_encoded] +level = WARN +handlers = console +qualname = encoded +propagate = 0 + +[logger_encoded_listener] +level = INFO +handlers = console +qualname = snovault.elasticsearch.es_index_listener +propagate = 0 + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(message)s \ No newline at end of file diff --git a/deploy/docker/production/entrypoint.sh b/deploy/docker/production/entrypoint.sh new file mode 100644 index 0000000000..4089d44318 --- /dev/null +++ b/deploy/docker/production/entrypoint.sh @@ -0,0 +1,42 @@ +#!/bin/sh + + +# Global CGAP Application Entrypoint +# This script resolves which application type is desired based on +# the "$application_type" environment variable. 
Possible options are:
+#   * "deployment" to run the deployment
+#   * "ingester" to run the production ingester (forever)
+#   * "indexer" to run the production indexer (forever)
+#   * "portal" to run the production portal worker (API back-end)
+#   * "local" to run a local deployment
+
+# Note that only "local" can be run on a local machine; the same image
+# build, however, is used across the entire local/production stack.
+
+deployment="deployment"
+ingester="ingester"
+indexer="indexer"
+portal="portal"
+local="local"
+
+echo "Resolving which entrypoint is desired"
+
+# shellcheck disable=SC2154
+if [ "$application_type" = $deployment ]; then
+    sh entrypoint_deployment.sh
+elif [ "$application_type" = $ingester ]; then
+    sh entrypoint_ingester.sh
+elif [ "$application_type" = $indexer ]; then
+    sh entrypoint_indexer.sh
+elif [ "$application_type" = $portal ]; then
+    sh entrypoint_portal.sh
+elif [ "$application_type" = $local ]; then
+    sh entrypoint_local.sh
+else
+    echo "Could not resolve entrypoint! Check that \$application_type is set."
+    exit 1
+fi
+
+exit 0
+
+
diff --git a/deploy/docker/production/entrypoint_deployment.sh b/deploy/docker/production/entrypoint_deployment.sh
new file mode 100644
index 0000000000..74aad6465b
--- /dev/null
+++ b/deploy/docker/production/entrypoint_deployment.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+echo "Running a CGAP deployment on the given environment"
+
+# Run assume_identity.py to access the desired deployment configuration from
+# secrets manager - this builds production.ini
+poetry run python -m assume_identity
+
+# Clearing db/es is skipped for deployments: 'skip' is passed as the env
+# name so that clear-db-es-contents does not actually run
+poetry run clear-db-es-contents production.ini --app-name app --env skip
+
+## Create mapping
+poetry run create-mapping-on-deploy production.ini --app-name app
+
+# Load Data (based on production.ini, for now just master-inserts)
+# Not necessary after first deploy
+if [ -n "${INITIAL_DEPLOYMENT}" ]; then
+    poetry run load-data production.ini --app-name app --prod
+fi
+
+# Load access keys
+# Note that the secret name must match that which was created for this environment
+poetry run load-access-keys production.ini --app-name app --secret-name "$IDENTITY"
+
+exit 0
diff --git a/deploy/docker/production/entrypoint_indexer.sh b/deploy/docker/production/entrypoint_indexer.sh
new file mode 100644
index 0000000000..f0eacfc22d
--- /dev/null
+++ b/deploy/docker/production/entrypoint_indexer.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+echo "Starting up CGAP-Portal Indexer"
+
+# Run assume_identity.py to access the desired deployment configuration from
+# secrets manager - this builds production.ini
+poetry run python -m assume_identity
+
+# Start indexer, run forever
+while true
+do
+    poetry run es-index-data production.ini --app-name app || echo "Indexing Runtime Error thrown - check previous output"
+    sleep 1
+done
+
+exit 0
diff --git a/deploy/docker/production/entrypoint_ingester.sh b/deploy/docker/production/entrypoint_ingester.sh
new file mode 100644
index 0000000000..4f506e290f
--- /dev/null
+++ b/deploy/docker/production/entrypoint_ingester.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+echo "Starting CGAP-Portal Ingester"
+
+# Run assume_identity.py to access the desired deployment configuration from
+# secrets manager - this builds production.ini
+poetry run python -m assume_identity
+
+# will serve forever
+poetry run ingestion-listener production.ini --app-name app
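
As an illustrative sketch, the same image can be launched as any of the service types above by varying ``application_type`` (the tag below follows the Makefile's default ``ENV_NAME``; ``my-identity-secret`` is a placeholder for the Secrets Manager secret name)::

    docker run -e application_type=portal -e IDENTITY=my-identity-secret cgap-mastertest:latest
    docker run -e application_type=indexer -e IDENTITY=my-identity-secret cgap-mastertest:latest

diff --git a/deploy/docker/production/entrypoint_portal.sh 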
b/deploy/docker/production/entrypoint_portal.sh new file mode 100644 index 0000000000..a974b8f407 --- /dev/null +++ b/deploy/docker/production/entrypoint_portal.sh @@ -0,0 +1,13 @@ +#!/bin/sh + +echo "Starting up CGAP-Portal WSGI" + +# Run assume_identity.py to access the desired deployment configuration from +# secrets manager - this builds production.ini +poetry run python -m assume_identity + +# Start nginx proxy +service nginx start + +# Start application +pserve production.ini diff --git a/deploy/docker/production/install_nginx.sh b/deploy/docker/production/install_nginx.sh new file mode 100644 index 0000000000..98e82d6187 --- /dev/null +++ b/deploy/docker/production/install_nginx.sh @@ -0,0 +1,77 @@ +# Copied from: https://github.com/nginxinc/docker-nginx/blob/594ce7a8bc26c85af88495ac94d5cd0096b306f7/mainline/buster/Dockerfile + +# Standard set up Nginx +export NGINX_VERSION=1.17.10 +export NJS_VERSION=0.3.9 +export PKG_RELEASE=1~buster + +# Securely provisions deps, installs Nginx, then removes all artifacts +set -x \ + && apt-get update \ + && apt-get install --no-install-recommends --no-install-suggests -y gnupg1 ca-certificates \ + && \ + NGINX_GPGKEY=573BFD6B3D8FBC641079A6ABABF5BD827BD9BF62; \ + found=''; \ + for server in \ + ha.pool.sks-keyservers.net \ + hkp://keyserver.ubuntu.com:80 \ + hkp://p80.pool.sks-keyservers.net:80 \ + pgp.mit.edu \ + ; do \ + echo "Fetching GPG key $NGINX_GPGKEY from $server"; \ + apt-key adv --keyserver "$server" --keyserver-options timeout=10 --recv-keys "$NGINX_GPGKEY" && found=yes && break; \ + done; \ + test -z "$found" && echo >&2 "error: failed to fetch GPG key $NGINX_GPGKEY" && exit 1; \ + apt-get remove --purge --auto-remove -y gnupg1 && rm -rf /var/lib/apt/lists/* \ + && dpkgArch="$(dpkg --print-architecture)" \ + && nginxPackages=" \ + nginx=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-xslt=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-geoip=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-image-filter=${NGINX_VERSION}-${PKG_RELEASE} \ + nginx-module-njs=${NGINX_VERSION}.${NJS_VERSION}-${PKG_RELEASE} \ + " \ + && case "$dpkgArch" in \ + amd64|i386) \ + echo "deb https://nginx.org/packages/mainline/debian/ buster nginx" >> /etc/apt/sources.list.d/nginx.list \ + && apt-get update \ + ;; \ + *) \ + echo "deb-src https://nginx.org/packages/mainline/debian/ buster nginx" >> /etc/apt/sources.list.d/nginx.list \ + \ + && tempDir="$(mktemp -d)" \ + && chmod 777 "$tempDir" \ + \ + && savedAptMark="$(apt-mark showmanual)" \ + \ + && apt-get update \ + && apt-get build-dep -y $nginxPackages \ + && ( \ + cd "$tempDir" \ + && DEB_BUILD_OPTIONS="nocheck parallel=$(nproc)" \ + apt-get source --compile $nginxPackages \ + ) \ + \ + && apt-mark showmanual | xargs apt-mark auto > /dev/null \ + && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \ + \ + && ls -lAFh "$tempDir" \ + && ( cd "$tempDir" && dpkg-scanpackages . 
> Packages ) \ + && grep '^Package: ' "$tempDir/Packages" \ + && echo "deb [ trusted=yes ] file://$tempDir ./" > /etc/apt/sources.list.d/temp.list \ + && apt-get -o Acquire::GzipIndexes=false update \ + ;; \ + esac \ + \ + && apt-get install --no-install-recommends --no-install-suggests -y \ + $nginxPackages \ + gettext-base \ + && apt-get remove --purge --auto-remove -y ca-certificates && rm -rf /var/lib/apt/lists/* /etc/apt/sources.list.d/nginx.list \ + \ + && if [ -n "$tempDir" ]; then \ + apt-get purge -y --auto-remove \ + && rm -rf "$tempDir" /etc/apt/sources.list.d/temp.list; \ + fi + +# forward request and error logs to docker log collector +ln -sf /dev/stdout /var/log/nginx/access.log && ln -sf /dev/stderr /var/log/nginx/error.log diff --git a/deploy/docker/production/nginx.conf b/deploy/docker/production/nginx.conf new file mode 100644 index 0000000000..5362ef77c2 --- /dev/null +++ b/deploy/docker/production/nginx.conf @@ -0,0 +1,43 @@ +# Production nginx setup +# For now, this looks exactly like the local deployment +# It certainly needs updating + +error_log /var/log/nginx/error.log warn; + +events { + worker_connections 1024; +} +http { + resolver 8.8.8.8; + upstream app { + server 0.0.0.0:6543; + keepalive 10; + } + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + server { + listen 8000; + access_log /var/log/nginx/access.log main; + location / { + # Normalize duplicate slashes + if ($request ~ ^(GET|HEAD)\s([^?]*)//(.*)\sHTTP/[0-9.]+$) { + return 301 $2/$3; + } + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_pass http://app; + proxy_set_header Connection ""; + } + location ~ ^/_proxy/(.*)$ { + internal; + proxy_set_header Authorization ""; + proxy_set_header Content-Type ""; + proxy_buffering off; + proxy_pass $1$is_args$args; + } + } +} diff --git a/deploy/tests/test_deploy_beanstalk.py b/deploy/tests/test_deploy_beanstalk.py deleted file mode 100644 index a2ced93c7b..0000000000 --- a/deploy/tests/test_deploy_beanstalk.py +++ /dev/null @@ -1,33 +0,0 @@ -import pytest - -from unittest import mock -from ..deploy_beanstalk import previous_git_commit, parse - - -GIT_LOG_OUTPUT = '''commit bc061fc755015162741eec71f1a71ea6c3fdb786 -Author: j1z0 -Date: Thu Sep 22 22:23:54 2016 -0400 - - we need .aws for both master and production - -''' - - -pytestmark = [pytest.mark.working, pytest.mark.unit] - - -def test_parse_git_commit(): - author = "j1z0 " - msg = "we need .aws for both master and production" - expected = "%s - %s" % (author, msg) - actual = parse(GIT_LOG_OUTPUT) - - print("expected result: ", expected) - print("actual result: ", actual) - assert expected == actual - - -if __name__ == "__main__": - print(".") - test_parse_git_commit() - print("all good!") diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000000..61609cbc3e --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,62 @@ +version: "3.8" + +services: + + # Postgres Component + # Connect to it in development.ini with + # sqlalchemy.url = postgresql://postgres:postgres@db:5432/postgres + db: + build: ./deploy/docker/postgres + container_name: pg1 + environment: + POSTGRES_DB: postgres + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_PORT: 5432 + # Enable to get verbose logs from postgres + #command: ["postgres", "-c", "log_statement=all"] + + # 
ElasticSearch Component + # Disabled for now as too compute intensive, but feel free to try it if you don't need + # to test any search APIs + # To connect use: + # elasticsearch.server = es:9200 + # es: + # build: ./elasticsearch + # container_name: es1 + # environment: + # - node.name=es01 + # - cluster.name=es-docker-cluster + # - bootstrap.memory_lock=true + # - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + # ulimits: + # memlock: + # soft: -1 + # hard: -1 + # ports: + # - "9200:9200" + + # Main Application Component + # Options: + # * TEST - Just build (and not serve the app). Comment this to enable local deployment. + # * LOAD - Trigger a reload of the inserts in the database. + # After running once, comment this out so the container will serve without reloading inserts. + # Has no effect if TEST is specified. + app-portal: + build: . + container_name: cgap-portal + command: "/home/nginx/cgap-portal/entrypoint.sh" + environment: + # Default to caller's environment + AWS_ACCESS_KEY_ID: + AWS_SECRET_ACCESS_KEY: + # TEST: "true" # default: commented out to run local deployment - uncomment to run tests in the container + LOAD: "true" # default: load the inserts (comment this out after first run) + application_type: "local" # use local entrypoint + ENCODED_VERSION: "local" + CGAP_ENV_NAME: "cgap-docker-will-test" # XXX: update me + ports: + - "8000:8000" # nginx proxy port (note application traffic is not forwarded) + depends_on: + - db + #- es diff --git a/docs/source/account_creation.rst b/docs/source/account_creation.rst deleted file mode 100644 index c12e37ca2f..0000000000 --- a/docs/source/account_creation.rst +++ /dev/null @@ -1,26 +0,0 @@ -Account Creation -~~~~~~~~~~~~~~~~ - - -* Please email data wranglers at `support@4dnucleome.org `_ to get set up with an account with the access credentials for your role. -* Please provide an email address which you wish to use for your account and CC your PI for validation purposes. **The email associated with the account you use for login must be the same as the one registered with the 4DN-DCIC.** - - * This can be any email address (\ *e.g. an institutional email account*\ ) but must be connected to either a Google or Github account. - * For more information on linking your institutional email account to a Google account, see below. - -Signing in with your institutional email address -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - -* The DCIC uses the `OAuth `_ authentication system which will allow you to login with a Google or `GitHub `_ account. -* If you prefer to use your institutional email address to log in to the portal (recommended), you need to have a Google or GitHub account registered with that email address. -* If you do not already have a Google or GitHub account with your email address, you can set up one up by visiting the `Google account creation page with the non-gmail option `_. -NOTE that it is important not to register this account to have gmail as your institutional email address must be the primary email associated with the google account for authentication to work properly! -Once your account request is processed, you will then be able to log in with the 'LOG IN WITH GOOGLE' option using your institutional email address and Google account password. - -..
image:: /static/img/docs/submitting-metadata/new-google-acct.png - :target: /static/img/docs/submitting-metadata/new-google-acct.png - :alt: Embedded fields diff --git a/docs/source/beanstalk-deployment.rst b/docs/source/beanstalk-deployment.rst deleted file mode 100644 index a0409c3e4b..0000000000 --- a/docs/source/beanstalk-deployment.rst +++ /dev/null @@ -1,117 +0,0 @@ - -Beanstalk Deployment -==================== - -**NOTE** Much of this document is outdated. As of September, 2019, deployments are managed through torb and dcicutils/beanstalk_utils. The Travis deployment section is still applicable. - -Beanstalk deployment through travis ------------------------------------ - -Currently Travis is set to deploy to beansalk on succesful build. - - -* Branch 'master' will deploy to the 4dn-web-dev environment (if all test pass) -* Branch 'prodution' will deploy to the 4dn-prod environment (if all tests pass) - -So to push something to production it should go through the following steps. - - -#. Pull request is created for feature branch. -#. Pull request accepted and merged to master. -#. Travis will pick this up run tests and deploy to 4dn-web-dev -#. If that is all succcesful to deploy to production do. -#. git checkout production -#. git merge master -#. edit deploy_beanstalk.py and change version number on line 10 to be next version. -#. Check in your changes. -#. git push origin production -#. Travis will then run tests and if pass will deploy to production - -Dropping database ------------------ - -For test environment the database is not dropped for each deploy. This means that new upserts, -which change existing data will in most cases not execute succesfully on the test environment (Unit upgrades are put back in place). - -When that happens we need to drop the database and recreate it, so the inserts can be run. - -The Old hard way to do it.. boooo :( -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Easiest way to do that is to ssh into the beanstalk instance and do the follow: - -** Note ** to ssh in first ``pip install awsebcli`` then follow the setup instructions. With that installed you can simply type eb ssh (ensuring that the master branch is checked out). (If this doesn't work, try ``eb init`` before ``eb ssh``\ ) - -Once connected do the following: - -.. code-block:: bash - - source /opt/python/current/env - sudo service httpd stop - echo $RDS_PASSWORD - - dropdb -p $RDS_PORT -h $RDS_HOSTNAME -U $RDS_USERNAME -e $RDS_DB_NAME - - createdb -p $RDS_PORT -h $RDS_HOSTNAME -U $RDS_USERNAME -e $RDS_DB_NAME - - - # drop indexes in elastic search - curl -XDELETE 'http://172.31.49.128:9872/annotations' - - # for 4dn-web-dev (Development Environment) - curl -XDELETE 'http://172.31.49.128:9872/snovault' - - # for production (PLEASE DONT SCREW THIS UP :) ) - curl -XDELETE 'http://172.31.49.128:9872/ffprod' - - sudo shutdown -r now - - # this will drop you back to your local machine, if you want to trigger latest build from master (and you know it's a clean build) - - git checkout master - eb deploy - -The New awesome way to do it: -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. code-block:: bash - - sudo /opt/python/current/app/bin/dropdb - - # to bring things up again from back home - git checkout production - eb deploy - -Bye bye data. Use at your own risk, all warranties void. - -** Note ** this will temporarily bring the site down, for a couple of minutes - -Database backup / restore -------------------------- - -Database snapshots are automatically taken every day. 
To restore a backup on production (4dnweb-prod) - - -#. Go to the RDS tab and then look at the snapshots page. -#. Select the backup you want to restore. -#. Click Restore Snapshot -#. You will be prompted for a DB Instance Name, name it what you like. -#. Go to 4dnweb-prod environment and select configuration -> software configuration -#. Change the enviornment variable bnSTaLk_HOSTNAME to the name you just used for the new database. -#. Redeploy the applicaition production. - -Rebuilding beanstalk environemtns ---------------------------------- - -Any attempt to delete one of the beanstalk environment will most likely fail due to an inability to delete a secuirty group. This is because our RDS security group sets inbound rules for the beanstalk enviroments. So before you rebuild a beanstalk environment do the following: - - -#. Go to EC2's (aws console) -#. Select Security Groups -#. Search for sg-ab1d63d1 (this is our RDS security group) -#. Select inboud rules. -#. Find the inboud rule associated with the beanstalk environment security group (probably sg-something) -#. Remove that inboud rule. -#. Rebuild the envrionemnt. -#. You will need to add a new inbound rule to the RDS security group with the security group of the rebuilt Abeanstalk environment before deployment will be successful. -#. Security group id for beanstalk environment can be found under configuration -> Instances -> EC2 security Groups. diff --git a/docs/source/biosample_metadata.rst b/docs/source/biosample_metadata.rst deleted file mode 100644 index 3675b9d812..0000000000 --- a/docs/source/biosample_metadata.rst +++ /dev/null @@ -1,239 +0,0 @@ -Sample Metadata ---------------- - - -* The 4DN consortium will collect metadata on the preparation of a biological sample (biosample) in order to make the data FAIR, Findable, Accessible, Interoperable and Reusable, to the extent that such a service benefits the network and scientific community at large. -* Many 4DN experiments are performed using cell lines. Other experiments may be performed on isolated primary cells or tissues. -* Experimenters may also perform assays where cells are transiently treated, for example by addition of a drug or introduction of a silencing construct, or stably genomically modified through Crispr technologies. - -This page outlines and describes the types of metadata that is requested for biological samples. - - -* The first part of the document outlines the few fields shared by all biosamples. -* The Cell Lines and Samples Working Group has focused on developing requirements for cell line metadata and this is the primary focus of the `remainder of this document <#basic-biosample-metadata>`_. - -*Note that the working group is still discussing some of the metadata and requirements are evolving. If you have any questions or concerns please feel free to `contact us `_.* - -Basic Biosample Metadata -^^^^^^^^^^^^^^^^^^^^^^^^ - -Biosample Fields -^^^^^^^^^^^^^^^^ - -``description`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* A brief specific description of the biosample -* example "K562 cells prepared for in situ Hi-C experiments" -* example "GM12878 cells modified using Crispr to delete CTCF sites in the PARK2 region prepared for chromatin capture experiments" - -``biosource`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* The value of this field is a reference to usually one **Biosource** object whose metadata is submitted separately. 
-* This **Biosource** object refers to a cell line, tissue or primary cell and has its own associated metadata. - - * **NOTE**\ : The tiered cell lines all have an existing biosource in the database that can be re-used and referenced by it's accession, alias or uuid - while other biosources may require you to submit metadata for them. - -* It is possible, though rare, for a single biosample to consist of more than one biosource - eg. pooling of two different cell lines - in these cases you can reference multiple biosources in this field. - -``cell_culture_details`` - **Required only for cultured cell lines** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* The value of this field is a reference to a *BiosampleCellCulture* object whose metadata is submitted separately and is detailed in the *Cell Culture Metadata section below*. - -``modifications`` - **Required** if cells have been genomically modified {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* **Genetic modifications** - this field is **required** when a Biosample has been genomically modified eg. Crispr modification of a cell line. -* The value of this field is a list of one or more references to a **Modification** object whose metadata is submitted separately. -* Modifications include information on expression or targeting vectors stably transfected to generate Crispr'ed or other genomically modified samples. - -``treatments`` - **Required** if cells have been treated 'transiently' with drugs or by transfection. {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* This field is used when a Biosample has been treated with a chemical/drug or transiently altered using RNA interference techniques. -* The value of this field is a reference to a **Treatment** object whose metadata is submitted separately. -* There are currently two general types of treatments - more will be added as needed. - - #. Addition of a drug or chemical - #. Transient or inducible RNA interference - -``biosample_protocols`` - Optional {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Protocols used in Biosample Preparation - this is distinct from SOPs and protocol for cell cultures. -* example protocol description "Preparation of isolated brain tissue from BALB/c adult mice for chromatin capture experiments" -* The value of this field is a list of references to a **Protocol** object - an alias or uuid. -* The **Protocol** object can include an attachment to a pdf document describing the steps of the preparation. -* The **Protocol** object is of type 'Biosample preparation protocol' and can be further classified as 'Tissue Preparation Methods' if applicable. - -Cell Culture Metadata -^^^^^^^^^^^^^^^^^^^^^ - - -* The consortium has designated 4 cell lines as `Tier 1 `_\ , which will be a primary focus of 4DN research and integrated analysis. -* A number of other lines that are expected to be used by multiple labs and have approved SOPs for maintaining them have been designated `Tier 2 `_. -* In addition, some labs may wish to submit datasets produced using other cell lines. - -To maintain consistent data standards and in order to facilitate integrated analysis the Cell Lines and Samples Working Group has adopted the following policy. 
- -Certain types of metadata, if not submitted will prevent your data from being flagged “gold standard”. For your data to be considered “gold standard”, you will need to obtain your cells from the approved source and grow them precisely according to the approved SOP and include the following required information: - - -#. A light microscopy image (DIC or phase contrast) of the cells at the time of harvesting (omics) or under each experimental condition (imaging); -#. culture start date, culture harvest date, culture duration, passage number and doubling number - -Other metadata is strongly encouraged and the exact requirements may vary somewhat depending on the cell type and when the data was produced (i.e. some older experiments can be 'grandfathered' in even if they do not 'pass' all the requirements). - -The biosample cell culture metadata fields that can be submitted are described below. - -BiosampleCellCulture fields -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -``description`` - Strongly Encouraged {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* A short description of the cell culture procedure -* example "Details on culturing a preparation of K562 cells" - -``morphology_image`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Phase Contrast or DIC Image of at least 50 cells showing morphology at the time of collection -* This is an authentication standard particularly relevant to Tiered cell lines. -* The value of this field is a reference to an **Image** object that needs to be submitted separately. - -``culture_start_date`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* The date the the cells were most recently thawed and cultured for the submitted experiment -* Date can be submitted in as YYYY-MM-DD or YYYY-MM-DDTHH:MM:SSTZD ((TZD is the time zone designator; use Z to express time in UTC or for time expressed in local time add a time zone offset from UTC +HH:MM or -HH:MM). -* example Date only (most common use case) - "2017-01-01" -* example Date and Time (uncommonly used) -"2017-01-01T17:00:00+00:00" - note for time; hours, minutes, seconds and offset are required but may be 00 filled. - -``culture_harvest_date`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* The date the culture was harvested for biosample preparation. -* Date format as above. - -``culture_duration`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Total Days in Culture. -* Total number of culturing days since receiving original vial, including pyramid stocking and expansion since thawing the working stock, through to harvest date. -* The field value is a number - can be floating point -* example "5" -* example "3.5" - -``passage_number`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Number of passages since receiving original vial, including pyramid stocking and expansion since thawing the working stock, through to harvest date. -* Only integer values are allowed in this field eg. 3, 5, 11 - -``doubling_number`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* The number of times the population has doubled since the time of thaw (culture start date) until harvest. -* This may be determined and reported in different ways - - #. passage ratio and number of passages - #. direct cell counts. 
- -* Therefore, this field takes a string value -* example "7.88" -* example "5 passages split 1:4" - -``follows_sop`` - **Required** {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Flag to indicate if the 4DN SOP for the specified cell line was followed - options '\ **Yes**\ ' or '\ **No**\ ' -* If a cell line is not one of the 'Tiered' 4DN lines this field should be set to 'No' - -``protocols_additional`` - **Required** if 'follows_sop' is 'No' {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Protocols used in Cell Culture when there is deviation from a 4DN approved SOP. -* Protocols describing non-4DN protocols or deviations from 4DN SOPs, including additional culture manipulations eg. stem cell differentiation or cell cycle synchronization if they do not follow recommended 4DN SOPs -* The value of this field is a list of references to a **Protocol** object - an alias or uuid. -* The **Protocol** object can include an attachment to the pdf document. - -``doubling_time`` - Optional {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Population Doubling Time -* The average time from thaw (culture start date) until harvest it takes for the population to double. -* Researchers can record the number of times they split the cells and by what ratio as a simple approximation of doubling time. This is especially important for some cell lines eg. IMR90 (a mortal line) and HI and H9 human stem cells. -* eg. '2 days' - -``authentication_protocols`` - Optional {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* References to one or more **Protocol** objects can be submitted in this field. -* The **Protocol** objects should be of the type 'Authentication document' -* The **Protocol** object can be further classified by indicating a specific classification eg. 'Karyotyping authentication' or 'Differentiation authentication'. -* The **Protocol** description should include specific information on the kind of authentication - - * example "g-banding karyotype report" - * example "images of FoxA2 and Sox17 expression in differentiated endoderm cells" - -* The **Protocol** object can include an attachment to the pdf or image document. - -``karyotype`` - Optional description of cell ploidy and karyotype {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* Description of cell Ploidy - a textual description of the population ploidy and/or karyotype. -* Important for potentially genomically unstable lines and strongly encouraged if the passage number of an unstable line is greater than 10. -* A textual description of chromosome count and any noted rearrangements or copy number variations. -* examples include - - * chromosome counts or structural variation using sequencing data - * chromosome counts using droplet PCR - * cytological G-banding - -* Using this field allows this information to be queried in searches. -* **NOTE** An image or authentication document (see above) may be submitted in place or in addition to this. - -``differentiation_state`` - Optional {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* For cells that have undergone differentiation a description of the differention state and markers used to determine the state. -* Using this field allows this information to be queried in searches. 
-* example 'Definitive endoderm as determined by the expression of Sox17 and FoxA2' -* **NOTE** An authentication document (see above) can be submitted in place or in addition to this. - -``synchronization_stage`` - Optional {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* If a culture is synchronized then the cell cycle stage or description of the point from which the biosample used in an experiment is prepared. -* Using this field allows this information to be queried in searches. -* example 'M-phase metaphase arrested cells' -* **NOTE** An authentication document (see above) can be submitted in place or in addition to this. - -``cell_line_lot_number`` - Strongly Suggested for non-Tier 1 cells {:.text-400} -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - -* For 4DN Tier2 or unclassified cell lines - a lot number or other information to uniquely identify the source/lot of the cells diff --git a/docs/source/docker-local.rst b/docs/source/docker-local.rst new file mode 100644 index 0000000000..927e0cb153 --- /dev/null +++ b/docs/source/docker-local.rst @@ -0,0 +1,80 @@ +CGAP-Docker (Local) +=================== + +With Docker, it is possible to run a local deployment of CGAP without installing any system level +dependencies other than Docker. A few important notes on this setup: + +* Although the build dependency layer is cached, it still takes around 4 minutes to rebuild the front-end for each image. This limitation is tolerable considering the local deployment now identically matches the execution runtime of production. +* This setup only works when users have sourced AWS Keys in the main account (to connect to the shared ES cluster). +* IMPORTANT: Do not upload the local deployment container image to any registry. + + +Installing Docker +^^^^^^^^^^^^^^^^^ + +Install Docker with (OSX assumed):: + + $ brew install docker + + +Configuring CGAP Docker +^^^^^^^^^^^^^^^^^^^^^^^ + + +Prior to building the image, navigate to ``deploy/docker/local``, open ``docker_development.ini`` and make the following modifications (at a minimum). + +* Modify ``env.name`` and ``indexer.namespace`` - these values must be globally unique with respect to our infrastructure (feel free to just replace the name) +* Consider changing load_prod_data to load_local_data if you need to load more inserts +* Once you have loaded inserts once, comment out L54 in ``docker-compose.yml`` to disable automatic insert reloading + +Building CGAP Docker +^^^^^^^^^^^^^^^^^^^^ + + +There are four new Make targets that should be sufficient for normal use. To build the image locally, ensure your AWS keys are sourced and run:: + + $ make build-docker-local # runs docker-compose build + $ make build-docker-local-clean # runs a no-cache build, regenerating all layers + $ make deploy-docker-local # runs docker-compose up + $ make deploy-docker-local-daemon # runs services in background + +The build will take around 10 minutes the first time but will speed up dramatically afterwards due to layer caching. In general, the rate-limiting step for rebuilding is the front-end build (unless you are also updating dependencies, which will slow down the build further). Although this may seem like a drawback, the key benefit is that what you are running in Docker is essentially identical to that which is orchestrated on ECS in production. This should reduce our reliance/need for test environments.
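+For reference, a typical local iteration cycle with these targets might look like the following sketch (assuming AWS keys are sourced in the calling shell and ``docker_development.ini`` has been configured as described above; the portal URL reflects the ``8000:8000`` port mapping in ``docker-compose.yml``)::
+
+    $ make build-docker-local     # rebuild the image after source changes (cached layers are reused)
+    $ make deploy-docker-local    # bring the cluster up, logging output to the console
+    $ # ... exercise the portal at http://localhost:8000 ...
+    $ docker-compose down         # stop the cluster when finished (can also ctrl-c)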
+ +Accessing CGAP Docker at Runtime +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + +To access the running container:: + + $ docker ps # will show running containers + $ docker exec -it <container id> bash + + +Alternative Configuration with Local ElasticSearch +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +ElasticSearch is too compute intensive to virtualize on most machines. For this reason we use the CGAP test ES cluster for this deployment instead of spinning up an ES cluster in Docker. It is possible, however, to modify ``docker-compose.yml`` to spin up a local Elasticsearch. If your machine can handle this, it is the ideal setup, but typically things are just too slow for it to be viable (YMMV). + + +Common Issues +^^^^^^^^^^^^^ + +Some notable issues that you may encounter include: + + * The NPM build may fail/hang - this can happen when Docker does not have enough resources. Try upping the amount of CPU/RAM you are allocating to Docker. + * Nginx install fails to locate GPG key - this happens when the Docker internal cache has run out of space and needs to be cleaned - see documentation on `docker prune `_. + + +Docker Command Cheatsheet +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Below is a small list of useful Docker commands for advanced users:: + + $ docker-compose build # will trigger a build of the local cluster (see make build-docker-local) + $ docker-compose build --no-cache # will trigger a fresh build of the entire cluster (see make build-docker-local-clean) + $ docker-compose down # will stop cluster (can also ctrl-c) + $ docker-compose down --volumes # will remove cluster volumes as well + $ docker-compose up # will start cluster and log all output to console (see make deploy-docker-local) + $ docker-compose up -d # will start cluster in background using existing containers (see make deploy-docker-local-daemon) + $ docker-compose up -d -V --build # trigger a rebuild/recreation of cluster containers + $ docker system prune # will cleanup ALL unused Docker components - BE CAREFUL WITH THIS diff --git a/docs/source/docker-production.rst b/docs/source/docker-production.rst new file mode 100644 index 0000000000..915cfdbee8 --- /dev/null +++ b/docs/source/docker-production.rst @@ -0,0 +1,71 @@ +CGAP-Docker (Production) +======================== + +CGAP-Docker runs in production on AWS Elastic Container Service and is meant to be orchestrated from the 4dn-cloud-infra repository. End users will modify the ``Makefile`` to suit their immediate build needs with respect to target AWS Account/ECR Repository/Tagging strategy. For more information on the specifics of the ECS setup, see 4dn-cloud-infra. + +The CGAP Application has been orchestrated into the ECS Service/Task paradigm. As of this writing, all core application services are built into the same Docker image. Which entrypoint to run is configured by an environment variable passed to the ECS Task. As such, we have 4 separate services described by the following table: + ++------------+--------------------------------+-----+------+------+-----+--------------------------+ +| Kind | Use | Num | Spot | vCPU | Mem | Notes | ++============+================================+=====+======+======+=====+==========================+ +| Portal | Serves standard API requests | 4 | Yes | .25 | 512 | Needs autoscaling | ++------------+--------------------------------+-----+------+------+-----+--------------------------+ +| Indexer | Hits /index at 1sec | 4 + | Yes | .25 | 512 | Can auto-scale based on | +| | intervals indefinitely.
| | | | | Queue Depth | +------------+--------------------------------+-----+------+------+-----+--------------------------+ +| Ingester | Polls SQS for ingestion tasks | 1 | No | .5 | 1024| Need API to add tasks | ++------------+--------------------------------+-----+------+------+-----+--------------------------+ +| Deployment | Triggers the standard | 0 | Yes | .25 | 512 | Run explicitly or (TODO) | +| | deployment actions. | | | | | by API | ++------------+--------------------------------+-----+------+------+-----+--------------------------+ + +Building an Image +^^^^^^^^^^^^^^^^^ + +The production application configuration is in ``deploy/docker/production``. A description of all the relevant files follows. + + * Dockerfile - at repo top level - configurable file containing the Docker build instructions for all local and production images. + * docker-compose.yml - at repo top level - configures the local deployment, unused in production. + * assume_identity.py - script for pulling global application configuration from Secrets Manager. Note that this secret is meant to be generated by the Datastore stack in 4dn-cloud-infra and manually filled out. Note that the ``$IDENTITY`` option configures which secret is used by the application workers and is passed to ECS Task definitions by 4dn-cloud-infra. + * entrypoint.sh - resolves which entrypoint is used based on ``$application_type`` + * entrypoint_portal.sh - serves portal API requests + * entrypoint_deployment.sh - deployment entrypoint + * entrypoint_indexer.sh - indexer entrypoint + * entrypoint_ingester.sh - ingester entrypoint + * install_nginx.sh - script for pulling in nginx + * cgap_any_alpha.ini - base ini file used to build production.ini on the server + * nginx.conf - nginx configuration + + +The following instructions describe how to build and push images. Note though that we assume an existing ECS setup. For instructions on how to orchestrate ECS, see 4dn-cloud-infra, but that is not the focus of this documentation. + + 1. Ensure the orchestrator credentials are sourced, or that your IAM user has been granted sufficient permissions to push to ECR. + 2. In the Makefile under ``build-docker-production``, replace "cgap-mastertest" with the env.name configured for the environment. This name should match the ECR repo name if you navigate to the ECR Console. + 3. Again in the Makefile, replace the ECR Repo URL (NOT the tag) with the one from the output of the ECR stack in the account. + 4. Run ``make ecr-login``, which should pull ECR credentials using the currently active AWS credentials. + 5. Run ``make build-docker-production``. + 6. Navigate to Foursight and queue the cluster update check. After around 5 minutes, the new images should be coming online. You can monitor the progress. + + +Note that steps 4-6 are all that need to be repeated once initial setup is done, assuming you are continuously pushing to the same location. To change which ECS orchestration you are effectively deploying to, all steps must be repeated in the relevant account. + + +Tagging Strategy +^^^^^^^^^^^^^^^^ + +As stated previously, there is a single image tag, typically ``latest``, that determines the image tag that ECS will use. This tag is configurable from the 4dn-cloud-infra repository. + +After a new image version has been pushed, issue a forced deployment update to the ECS cluster through Foursight. This action will spawn a new set of tasks for all services using the newer image tags.
For the portal, once the new tasks are deemed healthy by ECS and the Application Load Balancer, they will be added to the Portal Target Group and immediately begin serving requests. At that time the old tasks will begin the de-registration process from the target group, after which they will be spun down. The remaining new tasks will come online more quickly since they do not need to pass load balancer health checks. Once the old tasks have been cleaned up, it is safe to trigger a deployment task through the Deployment Service. + +Common Issues +^^^^^^^^^^^^^ + +In this section we detail some common errors and what to do about them. This section should be updated as more development in this setup occurs. + +1. ``Error: denied: User: <user ARN> is not authorized to perform: ecr:InitiateLayerUpload on resource: <ECR repository>`` + +This error can happen for several reasons: + * Invalid/incorrect IAM credentials + * IAM user has insufficient permissions + * IAM credentials are valid but from a different AWS account + diff --git a/docs/source/index.rst b/docs/source/index.rst index dbf1e7a8be..e3b09d0eba 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,24 +1,8 @@ -.. CGAP-Portal documentation master file, created by - sphinx-quickstart on Tue Oct 8 11:23:43 2019. You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - +================== CGAP Documentation -======================================= - -.. image:: https://travis-ci.org/dbmi-bgm/cgap-portal.svg?branch=master - :target: https://travis-ci.org/dbmi-bgm/cgap-portal - -|Coverage|_ - -.. |Coverage| image:: https://coveralls.io/repos/github/4dn-dcic/fourfront/badge.svg?branch=master .. _Coverage: https://coveralls.io/github/4dn-dcic/fourfront?branch=master - -|Quality|_ - -.. |Quality| image:: https://api.codacy.com/project/badge/Grade/f5fc54006b4740b5800e83eb2aeeeb43 .. _Quality: https://www.codacy.com/app/4dn/fourfront?utm_source=github.com&utm_medium=referral&utm_content=4dn-dcic/fourfront&utm_campaign=Badge_Grade +================== +.. image:: https://github.com/dbmi-bgm/cgap-portal/actions/workflows/main.yml/badge.svg .. image:: https://readthedocs.org/projects/cgap-portal/badge/?version=latest @@ -41,6 +25,8 @@ Be warned that features are under active development and may not be stable! Visi infrastructure_overview dataflow_overview ingestion + docker-local + docker-production *Data Model* diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst deleted file mode 100644 index e774fa4cac..0000000000 --- a/docs/source/introduction.rst +++ /dev/null @@ -1,50 +0,0 @@ -Introduction -------- - -* The 4DN Data Portal will be the primary access point to the omics and imaging data, analysis tools, and integrative models - generated and utilized by the 4DN Network. -* The primary high level organizing principle for the data is sets of replicate experiments. -* A good entry point for exploring available data is the `Browse Page `_. -* See `below <#metadata-structure>`_ for an overview of our metadata model. -* As of September 2017, the portal is currently open to the network - for data submission for standard functional genomics experiments (Hi-C and - variants, ChIA-PET and variants, RNA-seq, ChIP-seq, - ATAC-seq). -* Continuing developments in the metadata model and data portal are ongoing. - -Notes for prospective submitters -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If you would like submit data to the portal: - - -* You will need to `create a user account `_.
-* Please skim through the `metadata structure <#metadata-structure>`_. -* Check out the other pages in the **Help** menu for detailed information on the submission process. -* Of note are the required metadata for the biological samples used in experiments, which is specified `on this page `_. -* We can set up a webex call to discuss the details and the most convenient approach for your existing system. - -Metadata Structure ------------------- - -The DCIC, with input from different 4DN Network Working groups, has defined a metadata structure to describe: - - -* biological samples -* experimental methods -* data files -* analysis steps -* and other pertinent data. - -The framework for the the metadata structure is based on the work of -`ENCODE DCC `_. - -The metadata is organized as objects that are related with each other. - -In our database: - - -* The objects are stored in the `JavaScript Object Notation format `_. -* The schemas for the different object types are described in `JSON-LD format `_. -* The json schemas can be found `here `_. -* A documentation of the metadata schema is also available as a google doc `here `_. diff --git a/docs/source/local_installation.rst b/docs/source/local_installation.rst index e0e75fc0a1..55335197ae 100644 --- a/docs/source/local_installation.rst +++ b/docs/source/local_installation.rst @@ -1,10 +1,20 @@ Local Installation ================== +Docker Instructions +^^^^^^^^^^^^^^^^^^^ + +See `docker local docs. <./docker-local.rst>`_ + +Legacy Instructions +^^^^^^^^^^^^^^^^^^^ + The following instructions are for running a CGAP deployment with macOS and homebrew. +Note that as of summer 2021, these instructions are out of date. Please refer to the Docker setup. There are no guarantees the legacy instructions will work from this point forward. + CGAP is known to work with Python 3.6.x and will not work with Python 3.7 or greater. If part of the HMS team, it is -recommended to use Python 3.6.13, since that's what is running on our servers. It is best practice to create a fresh Python +recommended to use Python 3.6.13, since that's what is running on our servers. It is best practice to create a fresh Python virtualenv using one of these versions before proceeding to the following steps. * Step 0: Obtain AWS keys. These will need to added to your environment variables or through the AWS CLI (installed later in this process). diff --git a/etc/encoded-apache.conf b/etc/encoded-apache.conf deleted file mode 100644 index 069d48dfd9..0000000000 --- a/etc/encoded-apache.conf +++ /dev/null @@ -1,173 +0,0 @@ -KeepAliveTimeout 75 - -# The socket directory must be readable by the daemon process user -WSGISocketPrefix /var/run/wsgi -WSGIDaemonProcess encoded user=encoded group=encoded processes=6 threads=1 display-name=encoded-app -# No need for embedded interpreters -WSGIRestrictEmbedded On -# Pass the authorization header so basic auth works -WSGIPassAuthorization On - -# Indexer. 
Configure first to avoid catchall '/' -WSGIDaemonProcess encoded-indexer user=encoded group=encoded processes=1 threads=1 display-name=encoded-indexer -WSGIScriptAlias /_indexer /srv/encoded/parts/production-indexer/wsgi process-group=encoded-indexer application-group=%{GLOBAL} - -# https://github.com/GrahamDumpleton/mod_wsgi/issues/2 -SetEnvIf Request_Method HEAD X_REQUEST_METHOD=HEAD - -LogFormat "%v:%p %h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\" %{X-Stats}o&server_time=%D" vhost_combined_stats - - - Order deny,allow - Allow from all - - Require all granted - - - - -# Specifying process-group and application-group here ensures processes are started on httpd start -WSGIScriptAlias / /srv/encoded/parts/production/wsgi process-group=encoded application-group=%{GLOBAL} - - - Order deny,allow - Allow from all - - Require all granted - - # Limit upload size to 500 MB (375MB before base64 encoding) - LimitRequestBody 524288000 - # Apache adds -gzip to outgoing ETag in mod_deflate, remove inbound. - # https://issues.apache.org/bugzilla/show_bug.cgi?id=39727 - RequestHeader edit If-Match -gzip\"$ \" - RequestHeader edit If-None-Match -gzip\"$ \" - - # CORS support - Header always set Access-Control-Allow-Origin "*" - Header always set Access-Control-Allow-Methods "GET, HEAD" - Header always set Access-Control-Allow-Headers "Accept, Origin, Range, X-Requested-With" - Header always set Access-Control-Expose-Headers: "Content-Length, Content-Range, Content-Type" - - # CORS preflight - RewriteCond %{REQUEST_METHOD} OPTIONS - RewriteRule ^ - [redirect=200,last] - - -# Serve static resources directly from Apache -Alias /static /srv/encoded/src/encoded/static -Alias /favicon.ico /srv/encoded/src/encoded/static/img/favicon.ico - - - Order deny,allow - Allow from all - - Require all granted - - - -# Compress JSON responses. -AddOutputFilterByType DEFLATE application/javascript application/json text/css text/html text/javascript - -# Source map type (to enable compression) - - ForceType application/json - - -RewriteEngine On - -# Exclude robots from all but production site -#RewriteCond %{HTTP_HOST} =www.encodeproject.org -#RewriteRule ^/robots\.txt$ /static/robots.txt [last,passthrough] -#RewriteRule ^/robots\.txt$ /static/dev-robots.txt [last,passthrough] - -# Google site verification -#RewriteRule ^/google[0-9a-f]+.html$ /static$0 [last,passthrough] - -# Proxy modencode comparative page - - ProxyPass http://cake.encodedcc.org/comparative - ProxyPassReverse http://cake.encodedcc.org/comparative - - -# Proxy internal redirects for file downloads -SSLProxyEngine On -RewriteCond %{ENV:REDIRECT_STATUS} . -RewriteRule ^/_proxy/(.+)$ $1 [proxy] - -# Forbid PUT/PATCH/POST to plain http -RewriteCond %{HTTP:X-Forwarded-Proto} =http -RewriteCond %{REQUEST_METHOD} !^(GET|HEAD)$ -RewriteCond %{HTTP_HOST} ^(www\.encodeproject\.org|test\.encodedcc\.org)$ -RewriteRule ^ - [forbidden] - -# Forbid basic auth to plain http -RewriteCond %{HTTP:X-Forwarded-Proto} =http -RewriteCond %{HTTP:Authorization} . -RewriteCond %{HTTP_HOST} ^(www\.encodeproject\.org|test\.encodedcc\.org)$ -RewriteRule ^ - [forbidden] - -ErrorDocument 403 "Forbidden. HTTPS required for authenticated access." - -# Redirect no-www to https://www.encodeproject.org -#RewriteCond %{HTTP_HOST} =encodeproject.org -#RewriteCond %{REQUEST_METHOD} ^(GET|HEAD)$ -#RewriteCond %{HTTP:Authorization} !. 
-#RewriteRule ^ https://www.encodeproject.org%{REQUEST_URI} [redirect=permanent,last,qsappend] - -# Redirect to https -#RewriteCond %{HTTP:X-Forwarded-Proto} =http -#RewriteCond %{HTTP_HOST} ^(www\.encodeproject\.org|test\.encodedcc\.org)$ -#RewriteRule ^ https://%{HTTP_HOST}%{REQUEST_URI} [redirect=permanent,last,qsappend] - -################### -# Portal redirects - -# Normalize index.html etc. -RewriteRule ^/ENCODE$ $0/ [nocase] -RewriteRule ^/encode/(.*)$ /ENCODE/$1 -RewriteRule ^/ENCODE/FAQ$ $0/ -RewriteRule ^(/ENCODE/.+)\.html$ $1 -RewriteRule ^(/ENCODE(/|/.+/))index$ $1 - -# Redirect -RewriteRule ^/ENCODE/$ /? [last,redirect=permanent] -RewriteRule ^/ENCODE/search$ /search/?type=experiment [last,redirect=permanent] -RewriteRule ^/ENCODE/dataSummary$ /search/?type=experiment [last,redirect=permanent] -RewriteRule ^/ENCODE/dataMatrix/encodeDataMatrixMouse$ /search/?type=experiment&replicates.library.biosample.donor.organism.scientific_name=Mus\ musculus [last,redirect=permanent] -RewriteRule ^/ENCODE/dataMatrix/encodeDataMatrixHuman$ /search/?type=experiment&replicates.library.biosample.donor.organism.scientific_name=Homo\ sapiens [last,redirect=permanent] -RewriteRule ^/ENCODE/dataMatrix/encodeChipMatrixHuman$ /search/?type=experiment&replicates.library.biosample.donor.organism.scientific_name=Homo\ sapiens&assay_term_name=ChIP-seq [last,redirect=permanent] -RewriteRule ^/ENCODE/dataMatrix/encodeDataSummaryHuman$ /search/?type=experiment&replicates.library.biosample.donor.organism.scientific_name=Homo\ sapiens [last,redirect=permanent] -RewriteRule ^/ENCODE/dataMatrix/encodeChipMatrixMouse$ /search/?type=experiment&replicates.library.biosample.donor.organism.scientific_name=Mus\ musculus&assay_term_name=ChIP-seq [last,redirect=permanent] -RewriteRule ^/ENCODE/dataMatrix/encodeDataSummaryMouse$ /search/?type=experiment&replicates.library.biosample.donor.organism.scientific_name=Mus\ musculus [last,redirect=permanent] -RewriteRule ^/ENCODE/terms$ /about/data-use-policy/? [last,redirect=permanent] -RewriteRule ^/ENCODE/cellTypes$ /search/?type=biosample&organism.scientific_name=Homo\ sapiens [last,redirect=permanent] -RewriteRule ^/ENCODE/cellTypesMouse$ /search/?type=biosample&organism.scientific_name=Mus\ musculus [last,redirect=permanent] -RewriteRule ^/ENCODE/antibodies$ /search/?type=antibody_approval [last,redirect=permanent] -RewriteRule ^/ENCODE/softwareTools$ /software/? [last,redirect=permanent] -RewriteRule ^/ENCODE/experiment_guidelines$ /about/experiment-guidelines/? [last,redirect=permanent] -RewriteRule ^/ENCODE/platform_characterization$ /data-standards/platform-characterization/? [last,redirect=permanent] -RewriteRule ^/ENCODE/qualityMetrics$ /data-standards/2012-quality-metrics/? [last,redirect=permanent] -RewriteRule ^/ENCODE/contributors$ /about/contributors/? [last,redirect=permanent] -RewriteRule ^/ENCODE/analysis$ /about/2012-integrative-analysis/? [last,redirect=permanent] -RewriteRule ^/ENCODE/pubsOther$ /publications/? [last,redirect=permanent] -RewriteRule ^/ENCODE/pubsEncode$ /publications/? [last,redirect=permanent] -RewriteRule ^/ENCODE/fileFormats$ /help/file-formats/? [last,redirect=permanent] -RewriteRule ^/ENCODE/contacts$ /help/contacts/? [last,redirect=permanent] -RewriteRule ^/ENCODE/FAQ/$ /tutorials/? [last,redirect=permanent] -RewriteRule ^/ENCODE/usageResources$ /tutorials/? [last,redirect=permanent] -RewriteRule ^/ENCODE/releaseLog$ /about/contributors/? [last,redirect=permanent] -RewriteRule ^/ENCODE/pilot$ /about/contributors/? 
[last,redirect=permanent] -RewriteRule ^/ENCODE/downloads$ /help/getting-started/? [last,redirect=permanent] -RewriteRule ^/ENCODE/downloadsMouse$ /help/getting-started/? [last,redirect=permanent] -RewriteRule ^/ENCODE/otherTerms$ /help/getting-started/? [last,redirect=permanent] -RewriteRule ^/ENCODE/integrativeAnalysis/VM$ http://encodedcc.stanford.edu/ftp/encodevm/? [last,redirect=permanent] -RewriteRule ^/encyclopedia/visualize http://genome.ucsc.edu/cgi-bin/hgTracks?db=hg19&hgt.customText=http://bib.umassmed.edu/~iyers/encode_elements/display/tracks.txt [last,redirect=permanent] - -# Fallback -RewriteRule ^/ENCODE/.*$ - [gone] - -# Redirect to genome browser -RewriteRule ^/cgi-bin/hgTracks$ http://genome.ucsc.edu/cgi-bin/hgTracks [last,redirect=permanent] -RewriteRule ^/cgi-bin/hgTables$ http://genome.ucsc.edu/cgi-bin/hgTables [last,redirect=permanent] -RewriteRule ^/cgi-bin/hgTrackUi$ http://genome.ucsc.edu/cgi-bin/hgTrackUi [last,redirect=permanent] -RewriteRule ^/cgi-bin/hgHubConnect$ http://genome.ucsc.edu/cgi-bin/hgHubConnect [last,redirect=permanent] diff --git a/etc/logging-apache.conf b/etc/logging-apache.conf deleted file mode 100644 index 2ad19759a8..0000000000 --- a/etc/logging-apache.conf +++ /dev/null @@ -1 +0,0 @@ -CustomLog ${APACHE_LOG_DIR}/access.log vhost_combined_stats diff --git a/examples/s3cp.py b/examples/s3cp.py deleted file mode 100644 index 4c3cc90b0f..0000000000 --- a/examples/s3cp.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python -# -*- coding: latin-1 -*- -import requests, subprocess, shlex, urlparse, os, sys - -AUTHID='user'; AUTHPW='secret'; HEADERS = {'content-type': 'application/json'}; SERVER = 'https://www.encodeproject.org/' -S3_SERVER='s3://encode-files/' - -#get all the file objects -files = requests.get( - 'https://www.encodeproject.org/search/?type=file&frame=embedded&limit=all', - auth=(AUTHID,AUTHPW), headers=HEADERS).json()['@graph'] - -#select your file -f_obj = files[123] - -#make the URL that will get redirected - get it from the file object's href property -encode_url = urlparse.urljoin(SERVER,f_obj.get('href')) - -#stream=True avoids actually downloading the file, but it evaluates the redirection -r = requests.get(encode_url, auth=(AUTHID,AUTHPW), headers=HEADERS, allow_redirects=True, stream=True) -try: - r.raise_for_status -except Exception: - print '%s href does not resolve' %(f_obj.get('accession')) - sys.exit() - -#this is the actual S3 https URL after redirection -s3_url = r.url - -#release the connection -r.close() - -#split up the url into components -o = urlparse.urlparse(s3_url) - -#pull out the filename -filename = os.path.basename(o.path) - -#hack together the s3 cp url (with the s3 method instead of https) -bucket_url = S3_SERVER.rstrip('/') + o.path -#print bucket_url - -#ls the file from the bucket -s3ls_string = subprocess.check_output(shlex.split('aws s3 ls %s' %(bucket_url))) -if s3ls_string.rstrip() == "": - print >> sys.stderr, "%s not in bucket" %(bucket_url) -else: - print "%s %s" %(f_obj.get('accession'), s3ls_string.rstrip()) - -#do the actual s3 cp -#return_value = subprocess.check_call(shlex.split('aws s3 cp %s %s' %(bucket_url, filename))) diff --git a/examples/submit_file.py b/examples/submit_file.py deleted file mode 100644 index 4ca2a96d5e..0000000000 --- a/examples/submit_file.py +++ /dev/null @@ -1,188 +0,0 @@ -""" Example file submission script - -Requires the `aws` command line utility: http://aws.amazon.com/cli/ -""" -import hashlib -import json -import os -import requests -import subprocess 
-import sys -import time - -host = 'http://localhost:6543' -encoded_access_key = '...' -encoded_secret_access_key = '...' - -path = 'example.fastq.gz' -my_lab = '/labs/your-lab-here' -my_award = '/awards/your-award-here' - -# From http://hgwdev.cse.ucsc.edu/~galt/encode3/validatePackage/validateEncode3-latest.tgz -encValData = 'encValData' -assembly = 'hg19' - -# ~2s/GB -print("Calculating md5sum.") -md5sum = hashlib.md5() -with open(path, 'rb') as f: - for chunk in iter(lambda: f.read(1024*1024), b''): - md5sum.update(chunk) - -data = { - "dataset": "ENCSR000ACY", - "replicate": "/replicates/6e85c807-684a-46e3-b4b9-1f7990e85720/", - "file_format": "fastq", - "file_size": os.path.getsize(path), - "md5sum": md5sum.hexdigest(), - "output_type": "reads", - "read_length": 101, - "run_type": "single-ended", - "platform": "ENCODE:HiSeq2000", - "submitted_file_name": path, - "lab": my_lab, - "award": my_award -} - - -#################### -# Local validation - -gzip_types = [ - "CEL", - "bam", - "bed", - "csfasta", - "csqual", - "fasta", - "fastq", - "gff", - "gtf", - "tar", - "sam", - "wig" -] - -magic_number = open(path, 'rb').read(2) -is_gzipped = magic_number == b'\x1f\x8b' -if data['file_format'] in gzip_types: - assert is_gzipped, 'Expected gzipped file' -else: - assert not is_gzipped, 'Expected un-gzipped file' - -chromInfo = '-chromInfo=%s/%s/chrom.sizes' % (encValData, assembly) -validate_map = { - ('fasta', None): ['-type=fasta'], - ('fastq', None): ['-type=fastq'], - ('bam', None): ['-type=bam', chromInfo], - ('bigWig', None): ['-type=bigWig', chromInfo], - ('bed', 'bed3'): ['-type=bed3', chromInfo], - ('bigBed', 'bed3'): ['-type=bed3', chromInfo], - ('bed', 'bed6'): ['-type=bed6+', chromInfo], # if this fails we will drop to bed3+ - ('bigBed', 'bed6'): ['-type=bigBed6+', chromInfo], # if this fails we will drop to bigBed3+ - ('bed', 'bedLogR'): ['-type=bed9+1', chromInfo, '-as=%s/as/bedLogR.as' % encValData], - ('bigBed', 'bedLogR'): ['-type=bigBed9+1', chromInfo, '-as=%s/as/bedLogR.as' % encValData], - ('bed', 'bedMethyl'): ['-type=bed9+2', chromInfo, '-as=%s/as/bedMethyl.as' % encValData], - ('bigBed', 'bedMethyl'): ['-type=bigBed9+2', chromInfo, '-as=%s/as/bedMethyl.as' % encValData], - ('bed', 'broadPeak'): ['-type=bed6+3', chromInfo, '-as=%s/as/broadPeak.as' % encValData], - ('bigBed', 'broadPeak'): ['-type=bigBed6+3', chromInfo, '-as=%s/as/broadPeak.as' % encValData], - ('bed', 'gappedPeak'): ['-type=bed12+3', chromInfo, '-as=%s/as/gappedPeak.as' % encValData], - ('bigBed', 'gappedPeak'): ['-type=bigBed12+3', chromInfo, '-as=%s/as/gappedPeak.as' % encValData], - ('bed', 'narrowPeak'): ['-type=bed6+4', chromInfo, '-as=%s/as/narrowPeak.as' % encValData], - ('bigBed', 'narrowPeak'): ['-type=bigBed6+4', chromInfo, '-as=%s/as/narrowPeak.as' % encValData], - ('bed', 'bedRnaElements'): ['-type=bed6+3', chromInfo, '-as=%s/as/bedRnaElements.as' % encValData], - ('bigBed', 'bedRnaElements'): ['-type=bed6+3', chromInfo, '-as=%s/as/bedRnaElements.as' % encValData], - ('bed', 'bedExonScore'): ['-type=bed6+3', chromInfo, '-as=%s/as/bedExonScore.as' % encValData], - ('bigBed', 'bedExonScore'): ['-type=bigBed6+3', chromInfo, '-as=%s/as/bedExonScore.as' % encValData], - ('bed', 'bedRrbs'): ['-type=bed9+2', chromInfo, '-as=%s/as/bedRrbs.as' % encValData], - ('bigBed', 'bedRrbs'): ['-type=bigBed9+2', chromInfo, '-as=%s/as/bedRrbs.as' % encValData], - ('bed', 'enhancerAssay'): ['-type=bed9+1', chromInfo, '-as=%s/as/enhancerAssay.as' % encValData], - ('bigBed', 'enhancerAssay'): ['-type=bigBed9+1', 
chromInfo, '-as=%s/as/enhancerAssay.as' % encValData], - ('bed', 'modPepMap'): ['-type=bed9+7', chromInfo, '-as=%s/as/modPepMap.as' % encValData], - ('bigBed', 'modPepMap'): ['-type=bigBed9+7', chromInfo, '-as=%s/as/modPepMap.as' % encValData], - ('bed', 'pepMap'): ['-type=bed9+7', chromInfo, '-as=%s/as/pepMap.as' % encValData], - ('bigBed', 'pepMap'): ['-type=bigBed9+7', chromInfo, '-as=%s/as/pepMap.as' % encValData], - ('bed', 'openChromCombinedPeaks'): ['-type=bed9+12', chromInfo, '-as=%s/as/openChromCombinedPeaks.as' % encValData], - ('bigBed', 'openChromCombinedPeaks'): ['-type=bigBed9+12', chromInfo, '-as=%s/as/openChromCombinedPeaks.as' % encValData], - ('bed', 'peptideMapping'): ['-type=bed6+4', chromInfo, '-as=%s/as/peptideMapping.as' % encValData], - ('bigBed', 'peptideMapping'): ['-type=bigBed6+4', chromInfo, '-as=%s/as/peptideMapping.as' % encValData], - ('bed', 'shortFrags'): ['-type=bed6+21', chromInfo, '-as=%s/as/shortFrags.as' % encValData], - ('bigBed', 'shortFrags'): ['-type=bigBed6+21', chromInfo, '-as=%s/as/shortFrags.as' % encValData], - ('rcc', None): ['-type=rcc'], - ('idat', None): ['-type=idat'], - ('bedpe', None): ['-type=bed3+', chromInfo], - ('bedpe', 'mango'): ['-type=bed3+', chromInfo], - ('gtf', None): None, - ('tar', None): None, - ('tsv', None): None, - ('csv', None): None, - ('2bit', None): None, - ('csfasta', None): ['-type=csfasta'], - ('csqual', None): ['-type=csqual'], - ('CEL', None): None, - ('sam', None): None, - ('wig', None): None, - ('hdf5', None): None, - ('gff', None): None -} - -validate_args = validate_map.get((data['file_format'], data.get('file_format_type'))) -if validate_args is not None: - print("Validating file.") - try: - subprocess.check_output(['validateFiles'] + validate_args + [path]) - except subprocess.CalledProcessError as e: - print(e.output) - raise - - -#################### -# POST metadata - -headers = { - 'Content-type': 'application/json', - 'Accept': 'application/json', -} - -print("Submitting metadata.") -r = requests.post( - host + '/file', - auth=(encoded_access_key, encoded_secret_access_key), - data=json.dumps(data), - headers=headers, -) -try: - r.raise_for_status() -except Exception: - print('Submission failed: %s %s' % (r.status_code, r.reason)) - print(r.text) - raise -item = r.json()['@graph'][0] -print(json.dumps(item, indent=4, sort_keys=True)) - - -#################### -# POST file to S3 - -creds = item['upload_credentials'] -env = os.environ.copy() -env.update({ - 'AWS_ACCESS_KEY_ID': creds['access_key'], - 'AWS_SECRET_ACCESS_KEY': creds['secret_key'], - 'AWS_SECURITY_TOKEN': creds['session_token'], -}) - -# ~10s/GB from Stanford - AWS Oregon -# ~12-15s/GB from AWS Ireland - AWS Oregon -print("Uploading file.") -start = time.time() -try: - subprocess.check_call(['aws', 's3', 'cp', path, creds['upload_url']], env=env) -except subprocess.CalledProcessError as e: - # The aws command returns a non-zero exit code on error. 
- print("Upload failed with exit code %d" % e.returncode) - sys.exit(e.returncode) -else: - end = time.time() - duration = end - start - print("Uploaded in %.2f seconds" % duration) diff --git a/jest/environment.js b/jest/environment.js deleted file mode 100644 index f8a4420f0a..0000000000 --- a/jest/environment.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict'; - -//jest.mock('scriptjs'); -var jsdom = require('jsdom').jsdom; - -if (window.DOMParser === undefined) { - // jsdom - window.DOMParser = function DOMParser() {}; - window.DOMParser.prototype.parseFromString = function parseFromString(markup, type) { - var parsingMode = 'auto'; - type = type || ''; - if (type.indexOf('xml') >= 0) { - parsingMode = 'xml'; - } else if (type.indexOf('html') >= 0) { - parsingMode = 'html'; - } - var doc = jsdom(markup, {parsingMode: parsingMode}); - return doc; - }; -} diff --git a/nginx.yml b/nginx.yml deleted file mode 100644 index b67777e149..0000000000 --- a/nginx.yml +++ /dev/null @@ -1,78 +0,0 @@ -#cloud-config - -# Launch instance with a network interface configured to the local IP addresses we references in the config. -# Specify availability zone as we have a different subnet in each one. -# $ aws --profile production ec2 run-instances --user-data file://nginx.yml --iam-instance-profile Name="proxy" --image-id ami-5189a661 --instance-type t2.medium --region us-west-2 --placement AvailabilityZone=us-west-2c --network-interfaces '[{"DeviceIndex": 0, "Groups": ["sg-4ae7be2f"], "SubnetId": "subnet-201d1766", "PrivateIpAddresses": [{"PrivateIpAddress": "172.31.2.70", "Primary": true}, {"PrivateIpAddress": "172.31.2.78", "Primary": false}, {"PrivateIpAddress": "172.31.2.79", "Primary": false}]}]' - -# Choose instance size based on network performance required. - -# After boot associate elastic ip addresses with the private ip addresses -# $ aws --profile production ec2 associate-address --region us-west-2 --public-ip 52.11.61.187 --private-ip-address 172.31.2.70 --allow-reassociation --instance-id -# $ aws --profile production ec2 associate-address --region us-west-2 --public-ip 52.88.20.43 --private-ip-address 172.31.2.78 --allow-reassociation --instance-id -# $ aws --profile production ec2 associate-address --region us-west-2 --public-ip 52.32.76.4 --private-ip-address 172.31.2.79 --allow-reassociation --instance-id - -apt_sources: -- source: ppa:nginx/stable - -bootcmd: -- cloud-init-per once ssh-users-ca echo "TrustedUserCAKeys /etc/ssh/users_ca.pub" >> /etc/ssh/sshd_config - -output: - all: '| tee -a /var/log/cloud-init-output.log' - -package_upgrade: true - -packages: -- awscli -- curl -- dnsmasq -- nginx-full -- ntp -- unattended-upgrades -- update-notifier-common - -power_state: - mode: reboot - -runcmd: -- aws --region us-east-1 s3 cp s3://encoded-conf-proxy/ssl.tgz ssl.tgz -- mkdir -p /etc/nginx/ssl -- tar -zxf ssl.tgz --directory /etc/nginx/ssl -# Generate a big prime number for DH SSL (takes a few minutes.) 
-- openssl dhparam 2048 -out /etc/nginx/ssl/dhparam.pem -- chmod 600 /etc/nginx/ssl/dhparam.pem -- curl -o /etc/nginx/nginx.conf https://raw.githubusercontent.com/ENCODE-DCC/encoded/master/encode-proxy-nginx.conf -# The final octets of the local IP addresses configured above match the public IPs -- sed -i.bak s/171.67.205./172.31.2./g /etc/nginx/nginx.conf - -write_files: -- path: /etc/apt/apt.conf.d/20auto-upgrades - content: | - APT::Periodic::Update-Package-Lists "1"; - APT::Periodic::Unattended-Upgrade "1"; - -- path: /etc/apt/apt.conf.d/50unattended-upgrades - content: | - Unattended-Upgrade::Allowed-Origins { - "${distro_id} ${distro_codename}-security"; - }; - Unattended-Upgrade::Automatic-Reboot "true"; - -- path: /etc/motd - content: | - ######################################### - ## Nginx proxy server ## - ## For demo instances: ## - ## ssh .instance.encodedcc.org ## - ######################################### - -- path: /etc/network/interfaces.d/eth0.cfg - content: | - # The primary network interface - auto eth0 - iface eth0 inet dhcp - post-up ip addr add 172.31.2.78/20 dev eth0 - post-up ip addr add 172.31.2.79/20 dev eth0 - -- path: /etc/ssh/users_ca.pub - content: ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAv/ymOcnN4LhM4NACc3Or116XXJ6KytuOgB/+1qNkOFBqBosrn7cmJ35rsoNHRgYNrCsRE9ch74RKsN6H72FtSJgBhGh/9oUK7Os6Fqt3/ZZXxgxIx6ubs/MTgrxrAnujiBxUXMXQhLKMriNMpo8mt4nGYVtLk9PBjiyfncaS8H9ZKoNio9dhP8bmTuYvioAI35dqKdSlVLyzr/XkZxia8Ki+pQ0N6uuiEwMR3ToM+LSp8wpFOOAiu4PEAujRW7us/+1hlpKWfn0J7/V3826joHE+I967Vg/+ikcVhF77JjK1nib879VgCWfmn1HPQosIpk4yJfVgGvRVI7I2nfBPVw== encoded@demo-l.encodedcc.org diff --git a/poetry.lock b/poetry.lock index 7fc786b528..2fc7260d06 100644 --- a/poetry.lock +++ b/poetry.lock @@ -77,7 +77,7 @@ lxml = ["lxml"] [[package]] name = "bitarray" -version = "2.1.3" +version = "2.2.1" description = "efficient arrays of booleans -- C extension" category = "main" optional = false @@ -106,8 +106,8 @@ s3transfer = ">=0.3.0,<0.4.0" [[package]] name = "boto3-stubs" -version = "1.17.103.post1" -description = "Type annotations for boto3 1.17.103, generated by mypy-boto3-buider 4.22.1" +version = "1.17.106" +description = "Type annotations for boto3 1.17.106, generated by mypy-boto3-buider 4.22.1" category = "dev" optional = false python-versions = ">=3.6" @@ -117,282 +117,282 @@ botocore-stubs = "*" typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (==1.17.103.post1)"] -acm = ["mypy-boto3-acm (==1.17.103.post1)"] -acm-pca = ["mypy-boto3-acm-pca (==1.17.103.post1)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (==1.17.103.post1)"] -all = ["mypy-boto3-accessanalyzer (==1.17.103.post1)", "mypy-boto3-acm (==1.17.103.post1)", "mypy-boto3-acm-pca (==1.17.103.post1)", "mypy-boto3-alexaforbusiness (==1.17.103.post1)", "mypy-boto3-amp (==1.17.103.post1)", "mypy-boto3-amplify (==1.17.103.post1)", "mypy-boto3-amplifybackend (==1.17.103.post1)", "mypy-boto3-apigateway (==1.17.103.post1)", "mypy-boto3-apigatewaymanagementapi (==1.17.103.post1)", "mypy-boto3-apigatewayv2 (==1.17.103.post1)", "mypy-boto3-appconfig (==1.17.103.post1)", "mypy-boto3-appflow (==1.17.103.post1)", "mypy-boto3-appintegrations (==1.17.103.post1)", "mypy-boto3-application-autoscaling (==1.17.103.post1)", "mypy-boto3-application-insights (==1.17.103.post1)", "mypy-boto3-applicationcostprofiler (==1.17.103.post1)", "mypy-boto3-appmesh (==1.17.103.post1)", "mypy-boto3-apprunner (==1.17.103.post1)", "mypy-boto3-appstream (==1.17.103.post1)", 
"mypy-boto3-appsync (==1.17.103.post1)", "mypy-boto3-athena (==1.17.103.post1)", "mypy-boto3-auditmanager (==1.17.103.post1)", "mypy-boto3-autoscaling (==1.17.103.post1)", "mypy-boto3-autoscaling-plans (==1.17.103.post1)", "mypy-boto3-backup (==1.17.103.post1)", "mypy-boto3-batch (==1.17.103.post1)", "mypy-boto3-braket (==1.17.103.post1)", "mypy-boto3-budgets (==1.17.103.post1)", "mypy-boto3-ce (==1.17.103.post1)", "mypy-boto3-chime (==1.17.103.post1)", "mypy-boto3-cloud9 (==1.17.103.post1)", "mypy-boto3-clouddirectory (==1.17.103.post1)", "mypy-boto3-cloudformation (==1.17.103.post1)", "mypy-boto3-cloudfront (==1.17.103.post1)", "mypy-boto3-cloudhsm (==1.17.103.post1)", "mypy-boto3-cloudhsmv2 (==1.17.103.post1)", "mypy-boto3-cloudsearch (==1.17.103.post1)", "mypy-boto3-cloudsearchdomain (==1.17.103.post1)", "mypy-boto3-cloudtrail (==1.17.103.post1)", "mypy-boto3-cloudwatch (==1.17.103.post1)", "mypy-boto3-codeartifact (==1.17.103.post1)", "mypy-boto3-codebuild (==1.17.103.post1)", "mypy-boto3-codecommit (==1.17.103.post1)", "mypy-boto3-codedeploy (==1.17.103.post1)", "mypy-boto3-codeguru-reviewer (==1.17.103.post1)", "mypy-boto3-codeguruprofiler (==1.17.103.post1)", "mypy-boto3-codepipeline (==1.17.103.post1)", "mypy-boto3-codestar (==1.17.103.post1)", "mypy-boto3-codestar-connections (==1.17.103.post1)", "mypy-boto3-codestar-notifications (==1.17.103.post1)", "mypy-boto3-cognito-identity (==1.17.103.post1)", "mypy-boto3-cognito-idp (==1.17.103.post1)", "mypy-boto3-cognito-sync (==1.17.103.post1)", "mypy-boto3-comprehend (==1.17.103.post1)", "mypy-boto3-comprehendmedical (==1.17.103.post1)", "mypy-boto3-compute-optimizer (==1.17.103.post1)", "mypy-boto3-config (==1.17.103.post1)", "mypy-boto3-connect (==1.17.103.post1)", "mypy-boto3-connect-contact-lens (==1.17.103.post1)", "mypy-boto3-connectparticipant (==1.17.103.post1)", "mypy-boto3-cur (==1.17.103.post1)", "mypy-boto3-customer-profiles (==1.17.103.post1)", "mypy-boto3-databrew (==1.17.103.post1)", "mypy-boto3-dataexchange (==1.17.103.post1)", "mypy-boto3-datapipeline (==1.17.103.post1)", "mypy-boto3-datasync (==1.17.103.post1)", "mypy-boto3-dax (==1.17.103.post1)", "mypy-boto3-detective (==1.17.103.post1)", "mypy-boto3-devicefarm (==1.17.103.post1)", "mypy-boto3-devops-guru (==1.17.103.post1)", "mypy-boto3-directconnect (==1.17.103.post1)", "mypy-boto3-discovery (==1.17.103.post1)", "mypy-boto3-dlm (==1.17.103.post1)", "mypy-boto3-dms (==1.17.103.post1)", "mypy-boto3-docdb (==1.17.103.post1)", "mypy-boto3-ds (==1.17.103.post1)", "mypy-boto3-dynamodb (==1.17.103.post1)", "mypy-boto3-dynamodbstreams (==1.17.103.post1)", "mypy-boto3-ebs (==1.17.103.post1)", "mypy-boto3-ec2 (==1.17.103.post1)", "mypy-boto3-ec2-instance-connect (==1.17.103.post1)", "mypy-boto3-ecr (==1.17.103.post1)", "mypy-boto3-ecr-public (==1.17.103.post1)", "mypy-boto3-ecs (==1.17.103.post1)", "mypy-boto3-efs (==1.17.103.post1)", "mypy-boto3-eks (==1.17.103.post1)", "mypy-boto3-elastic-inference (==1.17.103.post1)", "mypy-boto3-elasticache (==1.17.103.post1)", "mypy-boto3-elasticbeanstalk (==1.17.103.post1)", "mypy-boto3-elastictranscoder (==1.17.103.post1)", "mypy-boto3-elb (==1.17.103.post1)", "mypy-boto3-elbv2 (==1.17.103.post1)", "mypy-boto3-emr (==1.17.103.post1)", "mypy-boto3-emr-containers (==1.17.103.post1)", "mypy-boto3-es (==1.17.103.post1)", "mypy-boto3-events (==1.17.103.post1)", "mypy-boto3-finspace (==1.17.103.post1)", "mypy-boto3-finspace-data (==1.17.103.post1)", "mypy-boto3-firehose (==1.17.103.post1)", "mypy-boto3-fis (==1.17.103.post1)", 
"mypy-boto3-fms (==1.17.103.post1)", "mypy-boto3-forecast (==1.17.103.post1)", "mypy-boto3-forecastquery (==1.17.103.post1)", "mypy-boto3-frauddetector (==1.17.103.post1)", "mypy-boto3-fsx (==1.17.103.post1)", "mypy-boto3-gamelift (==1.17.103.post1)", "mypy-boto3-glacier (==1.17.103.post1)", "mypy-boto3-globalaccelerator (==1.17.103.post1)", "mypy-boto3-glue (==1.17.103.post1)", "mypy-boto3-greengrass (==1.17.103.post1)", "mypy-boto3-greengrassv2 (==1.17.103.post1)", "mypy-boto3-groundstation (==1.17.103.post1)", "mypy-boto3-guardduty (==1.17.103.post1)", "mypy-boto3-health (==1.17.103.post1)", "mypy-boto3-healthlake (==1.17.103.post1)", "mypy-boto3-honeycode (==1.17.103.post1)", "mypy-boto3-iam (==1.17.103.post1)", "mypy-boto3-identitystore (==1.17.103.post1)", "mypy-boto3-imagebuilder (==1.17.103.post1)", "mypy-boto3-importexport (==1.17.103.post1)", "mypy-boto3-inspector (==1.17.103.post1)", "mypy-boto3-iot (==1.17.103.post1)", "mypy-boto3-iot-data (==1.17.103.post1)", "mypy-boto3-iot-jobs-data (==1.17.103.post1)", "mypy-boto3-iot1click-devices (==1.17.103.post1)", "mypy-boto3-iot1click-projects (==1.17.103.post1)", "mypy-boto3-iotanalytics (==1.17.103.post1)", "mypy-boto3-iotdeviceadvisor (==1.17.103.post1)", "mypy-boto3-iotevents (==1.17.103.post1)", "mypy-boto3-iotevents-data (==1.17.103.post1)", "mypy-boto3-iotfleethub (==1.17.103.post1)", "mypy-boto3-iotsecuretunneling (==1.17.103.post1)", "mypy-boto3-iotsitewise (==1.17.103.post1)", "mypy-boto3-iotthingsgraph (==1.17.103.post1)", "mypy-boto3-iotwireless (==1.17.103.post1)", "mypy-boto3-ivs (==1.17.103.post1)", "mypy-boto3-kafka (==1.17.103.post1)", "mypy-boto3-kendra (==1.17.103.post1)", "mypy-boto3-kinesis (==1.17.103.post1)", "mypy-boto3-kinesis-video-archived-media (==1.17.103.post1)", "mypy-boto3-kinesis-video-media (==1.17.103.post1)", "mypy-boto3-kinesis-video-signaling (==1.17.103.post1)", "mypy-boto3-kinesisanalytics (==1.17.103.post1)", "mypy-boto3-kinesisanalyticsv2 (==1.17.103.post1)", "mypy-boto3-kinesisvideo (==1.17.103.post1)", "mypy-boto3-kms (==1.17.103.post1)", "mypy-boto3-lakeformation (==1.17.103.post1)", "mypy-boto3-lambda (==1.17.103.post1)", "mypy-boto3-lex-models (==1.17.103.post1)", "mypy-boto3-lex-runtime (==1.17.103.post1)", "mypy-boto3-lexv2-models (==1.17.103.post1)", "mypy-boto3-lexv2-runtime (==1.17.103.post1)", "mypy-boto3-license-manager (==1.17.103.post1)", "mypy-boto3-lightsail (==1.17.103.post1)", "mypy-boto3-location (==1.17.103.post1)", "mypy-boto3-logs (==1.17.103.post1)", "mypy-boto3-lookoutequipment (==1.17.103.post1)", "mypy-boto3-lookoutmetrics (==1.17.103.post1)", "mypy-boto3-lookoutvision (==1.17.103.post1)", "mypy-boto3-machinelearning (==1.17.103.post1)", "mypy-boto3-macie (==1.17.103.post1)", "mypy-boto3-macie2 (==1.17.103.post1)", "mypy-boto3-managedblockchain (==1.17.103.post1)", "mypy-boto3-marketplace-catalog (==1.17.103.post1)", "mypy-boto3-marketplace-entitlement (==1.17.103.post1)", "mypy-boto3-marketplacecommerceanalytics (==1.17.103.post1)", "mypy-boto3-mediaconnect (==1.17.103.post1)", "mypy-boto3-mediaconvert (==1.17.103.post1)", "mypy-boto3-medialive (==1.17.103.post1)", "mypy-boto3-mediapackage (==1.17.103.post1)", "mypy-boto3-mediapackage-vod (==1.17.103.post1)", "mypy-boto3-mediastore (==1.17.103.post1)", "mypy-boto3-mediastore-data (==1.17.103.post1)", "mypy-boto3-mediatailor (==1.17.103.post1)", "mypy-boto3-meteringmarketplace (==1.17.103.post1)", "mypy-boto3-mgh (==1.17.103.post1)", "mypy-boto3-mgn (==1.17.103.post1)", "mypy-boto3-migrationhub-config 
(==1.17.103.post1)", "mypy-boto3-mobile (==1.17.103.post1)", "mypy-boto3-mq (==1.17.103.post1)", "mypy-boto3-mturk (==1.17.103.post1)", "mypy-boto3-mwaa (==1.17.103.post1)", "mypy-boto3-neptune (==1.17.103.post1)", "mypy-boto3-network-firewall (==1.17.103.post1)", "mypy-boto3-networkmanager (==1.17.103.post1)", "mypy-boto3-nimble (==1.17.103.post1)", "mypy-boto3-opsworks (==1.17.103.post1)", "mypy-boto3-opsworkscm (==1.17.103.post1)", "mypy-boto3-organizations (==1.17.103.post1)", "mypy-boto3-outposts (==1.17.103.post1)", "mypy-boto3-personalize (==1.17.103.post1)", "mypy-boto3-personalize-events (==1.17.103.post1)", "mypy-boto3-personalize-runtime (==1.17.103.post1)", "mypy-boto3-pi (==1.17.103.post1)", "mypy-boto3-pinpoint (==1.17.103.post1)", "mypy-boto3-pinpoint-email (==1.17.103.post1)", "mypy-boto3-pinpoint-sms-voice (==1.17.103.post1)", "mypy-boto3-polly (==1.17.103.post1)", "mypy-boto3-pricing (==1.17.103.post1)", "mypy-boto3-proton (==1.17.103.post1)", "mypy-boto3-qldb (==1.17.103.post1)", "mypy-boto3-qldb-session (==1.17.103.post1)", "mypy-boto3-quicksight (==1.17.103.post1)", "mypy-boto3-ram (==1.17.103.post1)", "mypy-boto3-rds (==1.17.103.post1)", "mypy-boto3-rds-data (==1.17.103.post1)", "mypy-boto3-redshift (==1.17.103.post1)", "mypy-boto3-redshift-data (==1.17.103.post1)", "mypy-boto3-rekognition (==1.17.103.post1)", "mypy-boto3-resource-groups (==1.17.103.post1)", "mypy-boto3-resourcegroupstaggingapi (==1.17.103.post1)", "mypy-boto3-robomaker (==1.17.103.post1)", "mypy-boto3-route53 (==1.17.103.post1)", "mypy-boto3-route53domains (==1.17.103.post1)", "mypy-boto3-route53resolver (==1.17.103.post1)", "mypy-boto3-s3 (==1.17.103.post1)", "mypy-boto3-s3control (==1.17.103.post1)", "mypy-boto3-s3outposts (==1.17.103.post1)", "mypy-boto3-sagemaker (==1.17.103.post1)", "mypy-boto3-sagemaker-a2i-runtime (==1.17.103.post1)", "mypy-boto3-sagemaker-edge (==1.17.103.post1)", "mypy-boto3-sagemaker-featurestore-runtime (==1.17.103.post1)", "mypy-boto3-sagemaker-runtime (==1.17.103.post1)", "mypy-boto3-savingsplans (==1.17.103.post1)", "mypy-boto3-schemas (==1.17.103.post1)", "mypy-boto3-sdb (==1.17.103.post1)", "mypy-boto3-secretsmanager (==1.17.103.post1)", "mypy-boto3-securityhub (==1.17.103.post1)", "mypy-boto3-serverlessrepo (==1.17.103.post1)", "mypy-boto3-service-quotas (==1.17.103.post1)", "mypy-boto3-servicecatalog (==1.17.103.post1)", "mypy-boto3-servicecatalog-appregistry (==1.17.103.post1)", "mypy-boto3-servicediscovery (==1.17.103.post1)", "mypy-boto3-ses (==1.17.103.post1)", "mypy-boto3-sesv2 (==1.17.103.post1)", "mypy-boto3-shield (==1.17.103.post1)", "mypy-boto3-signer (==1.17.103.post1)", "mypy-boto3-sms (==1.17.103.post1)", "mypy-boto3-sms-voice (==1.17.103.post1)", "mypy-boto3-snowball (==1.17.103.post1)", "mypy-boto3-sns (==1.17.103.post1)", "mypy-boto3-sqs (==1.17.103.post1)", "mypy-boto3-ssm (==1.17.103.post1)", "mypy-boto3-ssm-contacts (==1.17.103.post1)", "mypy-boto3-ssm-incidents (==1.17.103.post1)", "mypy-boto3-sso (==1.17.103.post1)", "mypy-boto3-sso-admin (==1.17.103.post1)", "mypy-boto3-sso-oidc (==1.17.103.post1)", "mypy-boto3-stepfunctions (==1.17.103.post1)", "mypy-boto3-storagegateway (==1.17.103.post1)", "mypy-boto3-sts (==1.17.103.post1)", "mypy-boto3-support (==1.17.103.post1)", "mypy-boto3-swf (==1.17.103.post1)", "mypy-boto3-synthetics (==1.17.103.post1)", "mypy-boto3-textract (==1.17.103.post1)", "mypy-boto3-timestream-query (==1.17.103.post1)", "mypy-boto3-timestream-write (==1.17.103.post1)", "mypy-boto3-transcribe (==1.17.103.post1)", 
"mypy-boto3-transfer (==1.17.103.post1)", "mypy-boto3-translate (==1.17.103.post1)", "mypy-boto3-waf (==1.17.103.post1)", "mypy-boto3-waf-regional (==1.17.103.post1)", "mypy-boto3-wafv2 (==1.17.103.post1)", "mypy-boto3-wellarchitected (==1.17.103.post1)", "mypy-boto3-workdocs (==1.17.103.post1)", "mypy-boto3-worklink (==1.17.103.post1)", "mypy-boto3-workmail (==1.17.103.post1)", "mypy-boto3-workmailmessageflow (==1.17.103.post1)", "mypy-boto3-workspaces (==1.17.103.post1)", "mypy-boto3-xray (==1.17.103.post1)"] -amp = ["mypy-boto3-amp (==1.17.103.post1)"] -amplify = ["mypy-boto3-amplify (==1.17.103.post1)"] -amplifybackend = ["mypy-boto3-amplifybackend (==1.17.103.post1)"] -apigateway = ["mypy-boto3-apigateway (==1.17.103.post1)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (==1.17.103.post1)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (==1.17.103.post1)"] -appconfig = ["mypy-boto3-appconfig (==1.17.103.post1)"] -appflow = ["mypy-boto3-appflow (==1.17.103.post1)"] -appintegrations = ["mypy-boto3-appintegrations (==1.17.103.post1)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (==1.17.103.post1)"] -application-insights = ["mypy-boto3-application-insights (==1.17.103.post1)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (==1.17.103.post1)"] -appmesh = ["mypy-boto3-appmesh (==1.17.103.post1)"] -apprunner = ["mypy-boto3-apprunner (==1.17.103.post1)"] -appstream = ["mypy-boto3-appstream (==1.17.103.post1)"] -appsync = ["mypy-boto3-appsync (==1.17.103.post1)"] -athena = ["mypy-boto3-athena (==1.17.103.post1)"] -auditmanager = ["mypy-boto3-auditmanager (==1.17.103.post1)"] -autoscaling = ["mypy-boto3-autoscaling (==1.17.103.post1)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (==1.17.103.post1)"] -backup = ["mypy-boto3-backup (==1.17.103.post1)"] -batch = ["mypy-boto3-batch (==1.17.103.post1)"] -braket = ["mypy-boto3-braket (==1.17.103.post1)"] -budgets = ["mypy-boto3-budgets (==1.17.103.post1)"] -ce = ["mypy-boto3-ce (==1.17.103.post1)"] -chime = ["mypy-boto3-chime (==1.17.103.post1)"] -cloud9 = ["mypy-boto3-cloud9 (==1.17.103.post1)"] -clouddirectory = ["mypy-boto3-clouddirectory (==1.17.103.post1)"] -cloudformation = ["mypy-boto3-cloudformation (==1.17.103.post1)"] -cloudfront = ["mypy-boto3-cloudfront (==1.17.103.post1)"] -cloudhsm = ["mypy-boto3-cloudhsm (==1.17.103.post1)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (==1.17.103.post1)"] -cloudsearch = ["mypy-boto3-cloudsearch (==1.17.103.post1)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (==1.17.103.post1)"] -cloudtrail = ["mypy-boto3-cloudtrail (==1.17.103.post1)"] -cloudwatch = ["mypy-boto3-cloudwatch (==1.17.103.post1)"] -codeartifact = ["mypy-boto3-codeartifact (==1.17.103.post1)"] -codebuild = ["mypy-boto3-codebuild (==1.17.103.post1)"] -codecommit = ["mypy-boto3-codecommit (==1.17.103.post1)"] -codedeploy = ["mypy-boto3-codedeploy (==1.17.103.post1)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (==1.17.103.post1)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (==1.17.103.post1)"] -codepipeline = ["mypy-boto3-codepipeline (==1.17.103.post1)"] -codestar = ["mypy-boto3-codestar (==1.17.103.post1)"] -codestar-connections = ["mypy-boto3-codestar-connections (==1.17.103.post1)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (==1.17.103.post1)"] -cognito-identity = ["mypy-boto3-cognito-identity (==1.17.103.post1)"] -cognito-idp = ["mypy-boto3-cognito-idp (==1.17.103.post1)"] -cognito-sync = ["mypy-boto3-cognito-sync (==1.17.103.post1)"] 
-comprehend = ["mypy-boto3-comprehend (==1.17.103.post1)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (==1.17.103.post1)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (==1.17.103.post1)"] -config = ["mypy-boto3-config (==1.17.103.post1)"] -connect = ["mypy-boto3-connect (==1.17.103.post1)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (==1.17.103.post1)"] -connectparticipant = ["mypy-boto3-connectparticipant (==1.17.103.post1)"] -cur = ["mypy-boto3-cur (==1.17.103.post1)"] -customer-profiles = ["mypy-boto3-customer-profiles (==1.17.103.post1)"] -databrew = ["mypy-boto3-databrew (==1.17.103.post1)"] -dataexchange = ["mypy-boto3-dataexchange (==1.17.103.post1)"] -datapipeline = ["mypy-boto3-datapipeline (==1.17.103.post1)"] -datasync = ["mypy-boto3-datasync (==1.17.103.post1)"] -dax = ["mypy-boto3-dax (==1.17.103.post1)"] -detective = ["mypy-boto3-detective (==1.17.103.post1)"] -devicefarm = ["mypy-boto3-devicefarm (==1.17.103.post1)"] -devops-guru = ["mypy-boto3-devops-guru (==1.17.103.post1)"] -directconnect = ["mypy-boto3-directconnect (==1.17.103.post1)"] -discovery = ["mypy-boto3-discovery (==1.17.103.post1)"] -dlm = ["mypy-boto3-dlm (==1.17.103.post1)"] -dms = ["mypy-boto3-dms (==1.17.103.post1)"] -docdb = ["mypy-boto3-docdb (==1.17.103.post1)"] -ds = ["mypy-boto3-ds (==1.17.103.post1)"] -dynamodb = ["mypy-boto3-dynamodb (==1.17.103.post1)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (==1.17.103.post1)"] -ebs = ["mypy-boto3-ebs (==1.17.103.post1)"] -ec2 = ["mypy-boto3-ec2 (==1.17.103.post1)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (==1.17.103.post1)"] -ecr = ["mypy-boto3-ecr (==1.17.103.post1)"] -ecr-public = ["mypy-boto3-ecr-public (==1.17.103.post1)"] -ecs = ["mypy-boto3-ecs (==1.17.103.post1)"] -efs = ["mypy-boto3-efs (==1.17.103.post1)"] -eks = ["mypy-boto3-eks (==1.17.103.post1)"] -elastic-inference = ["mypy-boto3-elastic-inference (==1.17.103.post1)"] -elasticache = ["mypy-boto3-elasticache (==1.17.103.post1)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (==1.17.103.post1)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (==1.17.103.post1)"] -elb = ["mypy-boto3-elb (==1.17.103.post1)"] -elbv2 = ["mypy-boto3-elbv2 (==1.17.103.post1)"] -emr = ["mypy-boto3-emr (==1.17.103.post1)"] -emr-containers = ["mypy-boto3-emr-containers (==1.17.103.post1)"] -es = ["mypy-boto3-es (==1.17.103.post1)"] -essential = ["mypy-boto3-cloudformation (==1.17.103.post1)", "mypy-boto3-dynamodb (==1.17.103.post1)", "mypy-boto3-ec2 (==1.17.103.post1)", "mypy-boto3-lambda (==1.17.103.post1)", "mypy-boto3-rds (==1.17.103.post1)", "mypy-boto3-s3 (==1.17.103.post1)", "mypy-boto3-sqs (==1.17.103.post1)"] -events = ["mypy-boto3-events (==1.17.103.post1)"] -finspace = ["mypy-boto3-finspace (==1.17.103.post1)"] -finspace-data = ["mypy-boto3-finspace-data (==1.17.103.post1)"] -firehose = ["mypy-boto3-firehose (==1.17.103.post1)"] -fis = ["mypy-boto3-fis (==1.17.103.post1)"] -fms = ["mypy-boto3-fms (==1.17.103.post1)"] -forecast = ["mypy-boto3-forecast (==1.17.103.post1)"] -forecastquery = ["mypy-boto3-forecastquery (==1.17.103.post1)"] -frauddetector = ["mypy-boto3-frauddetector (==1.17.103.post1)"] -fsx = ["mypy-boto3-fsx (==1.17.103.post1)"] -gamelift = ["mypy-boto3-gamelift (==1.17.103.post1)"] -glacier = ["mypy-boto3-glacier (==1.17.103.post1)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (==1.17.103.post1)"] -glue = ["mypy-boto3-glue (==1.17.103.post1)"] -greengrass = ["mypy-boto3-greengrass (==1.17.103.post1)"] -greengrassv2 = 
["mypy-boto3-greengrassv2 (==1.17.103.post1)"] -groundstation = ["mypy-boto3-groundstation (==1.17.103.post1)"] -guardduty = ["mypy-boto3-guardduty (==1.17.103.post1)"] -health = ["mypy-boto3-health (==1.17.103.post1)"] -healthlake = ["mypy-boto3-healthlake (==1.17.103.post1)"] -honeycode = ["mypy-boto3-honeycode (==1.17.103.post1)"] -iam = ["mypy-boto3-iam (==1.17.103.post1)"] -identitystore = ["mypy-boto3-identitystore (==1.17.103.post1)"] -imagebuilder = ["mypy-boto3-imagebuilder (==1.17.103.post1)"] -importexport = ["mypy-boto3-importexport (==1.17.103.post1)"] -inspector = ["mypy-boto3-inspector (==1.17.103.post1)"] -iot = ["mypy-boto3-iot (==1.17.103.post1)"] -iot-data = ["mypy-boto3-iot-data (==1.17.103.post1)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (==1.17.103.post1)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (==1.17.103.post1)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (==1.17.103.post1)"] -iotanalytics = ["mypy-boto3-iotanalytics (==1.17.103.post1)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (==1.17.103.post1)"] -iotevents = ["mypy-boto3-iotevents (==1.17.103.post1)"] -iotevents-data = ["mypy-boto3-iotevents-data (==1.17.103.post1)"] -iotfleethub = ["mypy-boto3-iotfleethub (==1.17.103.post1)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (==1.17.103.post1)"] -iotsitewise = ["mypy-boto3-iotsitewise (==1.17.103.post1)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (==1.17.103.post1)"] -iotwireless = ["mypy-boto3-iotwireless (==1.17.103.post1)"] -ivs = ["mypy-boto3-ivs (==1.17.103.post1)"] -kafka = ["mypy-boto3-kafka (==1.17.103.post1)"] -kendra = ["mypy-boto3-kendra (==1.17.103.post1)"] -kinesis = ["mypy-boto3-kinesis (==1.17.103.post1)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (==1.17.103.post1)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (==1.17.103.post1)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (==1.17.103.post1)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (==1.17.103.post1)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (==1.17.103.post1)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (==1.17.103.post1)"] -kms = ["mypy-boto3-kms (==1.17.103.post1)"] -lakeformation = ["mypy-boto3-lakeformation (==1.17.103.post1)"] -lambda = ["mypy-boto3-lambda (==1.17.103.post1)"] -lex-models = ["mypy-boto3-lex-models (==1.17.103.post1)"] -lex-runtime = ["mypy-boto3-lex-runtime (==1.17.103.post1)"] -lexv2-models = ["mypy-boto3-lexv2-models (==1.17.103.post1)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (==1.17.103.post1)"] -license-manager = ["mypy-boto3-license-manager (==1.17.103.post1)"] -lightsail = ["mypy-boto3-lightsail (==1.17.103.post1)"] -location = ["mypy-boto3-location (==1.17.103.post1)"] -logs = ["mypy-boto3-logs (==1.17.103.post1)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (==1.17.103.post1)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (==1.17.103.post1)"] -lookoutvision = ["mypy-boto3-lookoutvision (==1.17.103.post1)"] -machinelearning = ["mypy-boto3-machinelearning (==1.17.103.post1)"] -macie = ["mypy-boto3-macie (==1.17.103.post1)"] -macie2 = ["mypy-boto3-macie2 (==1.17.103.post1)"] -managedblockchain = ["mypy-boto3-managedblockchain (==1.17.103.post1)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (==1.17.103.post1)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (==1.17.103.post1)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (==1.17.103.post1)"] -mediaconnect = 
["mypy-boto3-mediaconnect (==1.17.103.post1)"] -mediaconvert = ["mypy-boto3-mediaconvert (==1.17.103.post1)"] -medialive = ["mypy-boto3-medialive (==1.17.103.post1)"] -mediapackage = ["mypy-boto3-mediapackage (==1.17.103.post1)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (==1.17.103.post1)"] -mediastore = ["mypy-boto3-mediastore (==1.17.103.post1)"] -mediastore-data = ["mypy-boto3-mediastore-data (==1.17.103.post1)"] -mediatailor = ["mypy-boto3-mediatailor (==1.17.103.post1)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (==1.17.103.post1)"] -mgh = ["mypy-boto3-mgh (==1.17.103.post1)"] -mgn = ["mypy-boto3-mgn (==1.17.103.post1)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (==1.17.103.post1)"] -mobile = ["mypy-boto3-mobile (==1.17.103.post1)"] -mq = ["mypy-boto3-mq (==1.17.103.post1)"] -mturk = ["mypy-boto3-mturk (==1.17.103.post1)"] -mwaa = ["mypy-boto3-mwaa (==1.17.103.post1)"] -neptune = ["mypy-boto3-neptune (==1.17.103.post1)"] -network-firewall = ["mypy-boto3-network-firewall (==1.17.103.post1)"] -networkmanager = ["mypy-boto3-networkmanager (==1.17.103.post1)"] -nimble = ["mypy-boto3-nimble (==1.17.103.post1)"] -opsworks = ["mypy-boto3-opsworks (==1.17.103.post1)"] -opsworkscm = ["mypy-boto3-opsworkscm (==1.17.103.post1)"] -organizations = ["mypy-boto3-organizations (==1.17.103.post1)"] -outposts = ["mypy-boto3-outposts (==1.17.103.post1)"] -personalize = ["mypy-boto3-personalize (==1.17.103.post1)"] -personalize-events = ["mypy-boto3-personalize-events (==1.17.103.post1)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (==1.17.103.post1)"] -pi = ["mypy-boto3-pi (==1.17.103.post1)"] -pinpoint = ["mypy-boto3-pinpoint (==1.17.103.post1)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (==1.17.103.post1)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (==1.17.103.post1)"] -polly = ["mypy-boto3-polly (==1.17.103.post1)"] -pricing = ["mypy-boto3-pricing (==1.17.103.post1)"] -proton = ["mypy-boto3-proton (==1.17.103.post1)"] -qldb = ["mypy-boto3-qldb (==1.17.103.post1)"] -qldb-session = ["mypy-boto3-qldb-session (==1.17.103.post1)"] -quicksight = ["mypy-boto3-quicksight (==1.17.103.post1)"] -ram = ["mypy-boto3-ram (==1.17.103.post1)"] -rds = ["mypy-boto3-rds (==1.17.103.post1)"] -rds-data = ["mypy-boto3-rds-data (==1.17.103.post1)"] -redshift = ["mypy-boto3-redshift (==1.17.103.post1)"] -redshift-data = ["mypy-boto3-redshift-data (==1.17.103.post1)"] -rekognition = ["mypy-boto3-rekognition (==1.17.103.post1)"] -resource-groups = ["mypy-boto3-resource-groups (==1.17.103.post1)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (==1.17.103.post1)"] -robomaker = ["mypy-boto3-robomaker (==1.17.103.post1)"] -route53 = ["mypy-boto3-route53 (==1.17.103.post1)"] -route53domains = ["mypy-boto3-route53domains (==1.17.103.post1)"] -route53resolver = ["mypy-boto3-route53resolver (==1.17.103.post1)"] -s3 = ["mypy-boto3-s3 (==1.17.103.post1)"] -s3control = ["mypy-boto3-s3control (==1.17.103.post1)"] -s3outposts = ["mypy-boto3-s3outposts (==1.17.103.post1)"] -sagemaker = ["mypy-boto3-sagemaker (==1.17.103.post1)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (==1.17.103.post1)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (==1.17.103.post1)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (==1.17.103.post1)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (==1.17.103.post1)"] -savingsplans = ["mypy-boto3-savingsplans (==1.17.103.post1)"] -schemas = ["mypy-boto3-schemas (==1.17.103.post1)"] 
-sdb = ["mypy-boto3-sdb (==1.17.103.post1)"] -secretsmanager = ["mypy-boto3-secretsmanager (==1.17.103.post1)"] -securityhub = ["mypy-boto3-securityhub (==1.17.103.post1)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (==1.17.103.post1)"] -service-quotas = ["mypy-boto3-service-quotas (==1.17.103.post1)"] -servicecatalog = ["mypy-boto3-servicecatalog (==1.17.103.post1)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (==1.17.103.post1)"] -servicediscovery = ["mypy-boto3-servicediscovery (==1.17.103.post1)"] -ses = ["mypy-boto3-ses (==1.17.103.post1)"] -sesv2 = ["mypy-boto3-sesv2 (==1.17.103.post1)"] -shield = ["mypy-boto3-shield (==1.17.103.post1)"] -signer = ["mypy-boto3-signer (==1.17.103.post1)"] -sms = ["mypy-boto3-sms (==1.17.103.post1)"] -sms-voice = ["mypy-boto3-sms-voice (==1.17.103.post1)"] -snowball = ["mypy-boto3-snowball (==1.17.103.post1)"] -sns = ["mypy-boto3-sns (==1.17.103.post1)"] -sqs = ["mypy-boto3-sqs (==1.17.103.post1)"] -ssm = ["mypy-boto3-ssm (==1.17.103.post1)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (==1.17.103.post1)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (==1.17.103.post1)"] -sso = ["mypy-boto3-sso (==1.17.103.post1)"] -sso-admin = ["mypy-boto3-sso-admin (==1.17.103.post1)"] -sso-oidc = ["mypy-boto3-sso-oidc (==1.17.103.post1)"] -stepfunctions = ["mypy-boto3-stepfunctions (==1.17.103.post1)"] -storagegateway = ["mypy-boto3-storagegateway (==1.17.103.post1)"] -sts = ["mypy-boto3-sts (==1.17.103.post1)"] -support = ["mypy-boto3-support (==1.17.103.post1)"] -swf = ["mypy-boto3-swf (==1.17.103.post1)"] -synthetics = ["mypy-boto3-synthetics (==1.17.103.post1)"] -textract = ["mypy-boto3-textract (==1.17.103.post1)"] -timestream-query = ["mypy-boto3-timestream-query (==1.17.103.post1)"] -timestream-write = ["mypy-boto3-timestream-write (==1.17.103.post1)"] -transcribe = ["mypy-boto3-transcribe (==1.17.103.post1)"] -transfer = ["mypy-boto3-transfer (==1.17.103.post1)"] -translate = ["mypy-boto3-translate (==1.17.103.post1)"] -waf = ["mypy-boto3-waf (==1.17.103.post1)"] -waf-regional = ["mypy-boto3-waf-regional (==1.17.103.post1)"] -wafv2 = ["mypy-boto3-wafv2 (==1.17.103.post1)"] -wellarchitected = ["mypy-boto3-wellarchitected (==1.17.103.post1)"] -workdocs = ["mypy-boto3-workdocs (==1.17.103.post1)"] -worklink = ["mypy-boto3-worklink (==1.17.103.post1)"] -workmail = ["mypy-boto3-workmail (==1.17.103.post1)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (==1.17.103.post1)"] -workspaces = ["mypy-boto3-workspaces (==1.17.103.post1)"] -xray = ["mypy-boto3-xray (==1.17.103.post1)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (==1.17.106)"] +acm = ["mypy-boto3-acm (==1.17.106)"] +acm-pca = ["mypy-boto3-acm-pca (==1.17.106)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (==1.17.106)"] +all = ["mypy-boto3-accessanalyzer (==1.17.106)", "mypy-boto3-acm (==1.17.106)", "mypy-boto3-acm-pca (==1.17.106)", "mypy-boto3-alexaforbusiness (==1.17.106)", "mypy-boto3-amp (==1.17.106)", "mypy-boto3-amplify (==1.17.106)", "mypy-boto3-amplifybackend (==1.17.106)", "mypy-boto3-apigateway (==1.17.106)", "mypy-boto3-apigatewaymanagementapi (==1.17.106)", "mypy-boto3-apigatewayv2 (==1.17.106)", "mypy-boto3-appconfig (==1.17.106)", "mypy-boto3-appflow (==1.17.106)", "mypy-boto3-appintegrations (==1.17.106)", "mypy-boto3-application-autoscaling (==1.17.106)", "mypy-boto3-application-insights (==1.17.106)", "mypy-boto3-applicationcostprofiler (==1.17.106)", "mypy-boto3-appmesh (==1.17.106)", "mypy-boto3-apprunner (==1.17.106)", "mypy-boto3-appstream 
(==1.17.106)", "mypy-boto3-appsync (==1.17.106)", "mypy-boto3-athena (==1.17.106)", "mypy-boto3-auditmanager (==1.17.106)", "mypy-boto3-autoscaling (==1.17.106)", "mypy-boto3-autoscaling-plans (==1.17.106)", "mypy-boto3-backup (==1.17.106)", "mypy-boto3-batch (==1.17.106)", "mypy-boto3-braket (==1.17.106)", "mypy-boto3-budgets (==1.17.106)", "mypy-boto3-ce (==1.17.106)", "mypy-boto3-chime (==1.17.106)", "mypy-boto3-cloud9 (==1.17.106)", "mypy-boto3-clouddirectory (==1.17.106)", "mypy-boto3-cloudformation (==1.17.106)", "mypy-boto3-cloudfront (==1.17.106)", "mypy-boto3-cloudhsm (==1.17.106)", "mypy-boto3-cloudhsmv2 (==1.17.106)", "mypy-boto3-cloudsearch (==1.17.106)", "mypy-boto3-cloudsearchdomain (==1.17.106)", "mypy-boto3-cloudtrail (==1.17.106)", "mypy-boto3-cloudwatch (==1.17.106)", "mypy-boto3-codeartifact (==1.17.106)", "mypy-boto3-codebuild (==1.17.106)", "mypy-boto3-codecommit (==1.17.106)", "mypy-boto3-codedeploy (==1.17.106)", "mypy-boto3-codeguru-reviewer (==1.17.106)", "mypy-boto3-codeguruprofiler (==1.17.106)", "mypy-boto3-codepipeline (==1.17.106)", "mypy-boto3-codestar (==1.17.106)", "mypy-boto3-codestar-connections (==1.17.106)", "mypy-boto3-codestar-notifications (==1.17.106)", "mypy-boto3-cognito-identity (==1.17.106)", "mypy-boto3-cognito-idp (==1.17.106)", "mypy-boto3-cognito-sync (==1.17.106)", "mypy-boto3-comprehend (==1.17.106)", "mypy-boto3-comprehendmedical (==1.17.106)", "mypy-boto3-compute-optimizer (==1.17.106)", "mypy-boto3-config (==1.17.106)", "mypy-boto3-connect (==1.17.106)", "mypy-boto3-connect-contact-lens (==1.17.106)", "mypy-boto3-connectparticipant (==1.17.106)", "mypy-boto3-cur (==1.17.106)", "mypy-boto3-customer-profiles (==1.17.106)", "mypy-boto3-databrew (==1.17.106)", "mypy-boto3-dataexchange (==1.17.106)", "mypy-boto3-datapipeline (==1.17.106)", "mypy-boto3-datasync (==1.17.106)", "mypy-boto3-dax (==1.17.106)", "mypy-boto3-detective (==1.17.106)", "mypy-boto3-devicefarm (==1.17.106)", "mypy-boto3-devops-guru (==1.17.106)", "mypy-boto3-directconnect (==1.17.106)", "mypy-boto3-discovery (==1.17.106)", "mypy-boto3-dlm (==1.17.106)", "mypy-boto3-dms (==1.17.106)", "mypy-boto3-docdb (==1.17.106)", "mypy-boto3-ds (==1.17.106)", "mypy-boto3-dynamodb (==1.17.106)", "mypy-boto3-dynamodbstreams (==1.17.106)", "mypy-boto3-ebs (==1.17.106)", "mypy-boto3-ec2 (==1.17.106)", "mypy-boto3-ec2-instance-connect (==1.17.106)", "mypy-boto3-ecr (==1.17.106)", "mypy-boto3-ecr-public (==1.17.106)", "mypy-boto3-ecs (==1.17.106)", "mypy-boto3-efs (==1.17.106)", "mypy-boto3-eks (==1.17.106)", "mypy-boto3-elastic-inference (==1.17.106)", "mypy-boto3-elasticache (==1.17.106)", "mypy-boto3-elasticbeanstalk (==1.17.106)", "mypy-boto3-elastictranscoder (==1.17.106)", "mypy-boto3-elb (==1.17.106)", "mypy-boto3-elbv2 (==1.17.106)", "mypy-boto3-emr (==1.17.106)", "mypy-boto3-emr-containers (==1.17.106)", "mypy-boto3-es (==1.17.106)", "mypy-boto3-events (==1.17.106)", "mypy-boto3-finspace (==1.17.106)", "mypy-boto3-finspace-data (==1.17.106)", "mypy-boto3-firehose (==1.17.106)", "mypy-boto3-fis (==1.17.106)", "mypy-boto3-fms (==1.17.106)", "mypy-boto3-forecast (==1.17.106)", "mypy-boto3-forecastquery (==1.17.106)", "mypy-boto3-frauddetector (==1.17.106)", "mypy-boto3-fsx (==1.17.106)", "mypy-boto3-gamelift (==1.17.106)", "mypy-boto3-glacier (==1.17.106)", "mypy-boto3-globalaccelerator (==1.17.106)", "mypy-boto3-glue (==1.17.106)", "mypy-boto3-greengrass (==1.17.106)", "mypy-boto3-greengrassv2 (==1.17.106)", "mypy-boto3-groundstation (==1.17.106)", "mypy-boto3-guardduty 
(==1.17.106)", "mypy-boto3-health (==1.17.106)", "mypy-boto3-healthlake (==1.17.106)", "mypy-boto3-honeycode (==1.17.106)", "mypy-boto3-iam (==1.17.106)", "mypy-boto3-identitystore (==1.17.106)", "mypy-boto3-imagebuilder (==1.17.106)", "mypy-boto3-importexport (==1.17.106)", "mypy-boto3-inspector (==1.17.106)", "mypy-boto3-iot (==1.17.106)", "mypy-boto3-iot-data (==1.17.106)", "mypy-boto3-iot-jobs-data (==1.17.106)", "mypy-boto3-iot1click-devices (==1.17.106)", "mypy-boto3-iot1click-projects (==1.17.106)", "mypy-boto3-iotanalytics (==1.17.106)", "mypy-boto3-iotdeviceadvisor (==1.17.106)", "mypy-boto3-iotevents (==1.17.106)", "mypy-boto3-iotevents-data (==1.17.106)", "mypy-boto3-iotfleethub (==1.17.106)", "mypy-boto3-iotsecuretunneling (==1.17.106)", "mypy-boto3-iotsitewise (==1.17.106)", "mypy-boto3-iotthingsgraph (==1.17.106)", "mypy-boto3-iotwireless (==1.17.106)", "mypy-boto3-ivs (==1.17.106)", "mypy-boto3-kafka (==1.17.106)", "mypy-boto3-kendra (==1.17.106)", "mypy-boto3-kinesis (==1.17.106)", "mypy-boto3-kinesis-video-archived-media (==1.17.106)", "mypy-boto3-kinesis-video-media (==1.17.106)", "mypy-boto3-kinesis-video-signaling (==1.17.106)", "mypy-boto3-kinesisanalytics (==1.17.106)", "mypy-boto3-kinesisanalyticsv2 (==1.17.106)", "mypy-boto3-kinesisvideo (==1.17.106)", "mypy-boto3-kms (==1.17.106)", "mypy-boto3-lakeformation (==1.17.106)", "mypy-boto3-lambda (==1.17.106)", "mypy-boto3-lex-models (==1.17.106)", "mypy-boto3-lex-runtime (==1.17.106)", "mypy-boto3-lexv2-models (==1.17.106)", "mypy-boto3-lexv2-runtime (==1.17.106)", "mypy-boto3-license-manager (==1.17.106)", "mypy-boto3-lightsail (==1.17.106)", "mypy-boto3-location (==1.17.106)", "mypy-boto3-logs (==1.17.106)", "mypy-boto3-lookoutequipment (==1.17.106)", "mypy-boto3-lookoutmetrics (==1.17.106)", "mypy-boto3-lookoutvision (==1.17.106)", "mypy-boto3-machinelearning (==1.17.106)", "mypy-boto3-macie (==1.17.106)", "mypy-boto3-macie2 (==1.17.106)", "mypy-boto3-managedblockchain (==1.17.106)", "mypy-boto3-marketplace-catalog (==1.17.106)", "mypy-boto3-marketplace-entitlement (==1.17.106)", "mypy-boto3-marketplacecommerceanalytics (==1.17.106)", "mypy-boto3-mediaconnect (==1.17.106)", "mypy-boto3-mediaconvert (==1.17.106)", "mypy-boto3-medialive (==1.17.106)", "mypy-boto3-mediapackage (==1.17.106)", "mypy-boto3-mediapackage-vod (==1.17.106)", "mypy-boto3-mediastore (==1.17.106)", "mypy-boto3-mediastore-data (==1.17.106)", "mypy-boto3-mediatailor (==1.17.106)", "mypy-boto3-meteringmarketplace (==1.17.106)", "mypy-boto3-mgh (==1.17.106)", "mypy-boto3-mgn (==1.17.106)", "mypy-boto3-migrationhub-config (==1.17.106)", "mypy-boto3-mobile (==1.17.106)", "mypy-boto3-mq (==1.17.106)", "mypy-boto3-mturk (==1.17.106)", "mypy-boto3-mwaa (==1.17.106)", "mypy-boto3-neptune (==1.17.106)", "mypy-boto3-network-firewall (==1.17.106)", "mypy-boto3-networkmanager (==1.17.106)", "mypy-boto3-nimble (==1.17.106)", "mypy-boto3-opsworks (==1.17.106)", "mypy-boto3-opsworkscm (==1.17.106)", "mypy-boto3-organizations (==1.17.106)", "mypy-boto3-outposts (==1.17.106)", "mypy-boto3-personalize (==1.17.106)", "mypy-boto3-personalize-events (==1.17.106)", "mypy-boto3-personalize-runtime (==1.17.106)", "mypy-boto3-pi (==1.17.106)", "mypy-boto3-pinpoint (==1.17.106)", "mypy-boto3-pinpoint-email (==1.17.106)", "mypy-boto3-pinpoint-sms-voice (==1.17.106)", "mypy-boto3-polly (==1.17.106)", "mypy-boto3-pricing (==1.17.106)", "mypy-boto3-proton (==1.17.106)", "mypy-boto3-qldb (==1.17.106)", "mypy-boto3-qldb-session (==1.17.106)", "mypy-boto3-quicksight (==1.17.106)", 
"mypy-boto3-ram (==1.17.106)", "mypy-boto3-rds (==1.17.106)", "mypy-boto3-rds-data (==1.17.106)", "mypy-boto3-redshift (==1.17.106)", "mypy-boto3-redshift-data (==1.17.106)", "mypy-boto3-rekognition (==1.17.106)", "mypy-boto3-resource-groups (==1.17.106)", "mypy-boto3-resourcegroupstaggingapi (==1.17.106)", "mypy-boto3-robomaker (==1.17.106)", "mypy-boto3-route53 (==1.17.106)", "mypy-boto3-route53domains (==1.17.106)", "mypy-boto3-route53resolver (==1.17.106)", "mypy-boto3-s3 (==1.17.106)", "mypy-boto3-s3control (==1.17.106)", "mypy-boto3-s3outposts (==1.17.106)", "mypy-boto3-sagemaker (==1.17.106)", "mypy-boto3-sagemaker-a2i-runtime (==1.17.106)", "mypy-boto3-sagemaker-edge (==1.17.106)", "mypy-boto3-sagemaker-featurestore-runtime (==1.17.106)", "mypy-boto3-sagemaker-runtime (==1.17.106)", "mypy-boto3-savingsplans (==1.17.106)", "mypy-boto3-schemas (==1.17.106)", "mypy-boto3-sdb (==1.17.106)", "mypy-boto3-secretsmanager (==1.17.106)", "mypy-boto3-securityhub (==1.17.106)", "mypy-boto3-serverlessrepo (==1.17.106)", "mypy-boto3-service-quotas (==1.17.106)", "mypy-boto3-servicecatalog (==1.17.106)", "mypy-boto3-servicecatalog-appregistry (==1.17.106)", "mypy-boto3-servicediscovery (==1.17.106)", "mypy-boto3-ses (==1.17.106)", "mypy-boto3-sesv2 (==1.17.106)", "mypy-boto3-shield (==1.17.106)", "mypy-boto3-signer (==1.17.106)", "mypy-boto3-sms (==1.17.106)", "mypy-boto3-sms-voice (==1.17.106)", "mypy-boto3-snowball (==1.17.106)", "mypy-boto3-sns (==1.17.106)", "mypy-boto3-sqs (==1.17.106)", "mypy-boto3-ssm (==1.17.106)", "mypy-boto3-ssm-contacts (==1.17.106)", "mypy-boto3-ssm-incidents (==1.17.106)", "mypy-boto3-sso (==1.17.106)", "mypy-boto3-sso-admin (==1.17.106)", "mypy-boto3-sso-oidc (==1.17.106)", "mypy-boto3-stepfunctions (==1.17.106)", "mypy-boto3-storagegateway (==1.17.106)", "mypy-boto3-sts (==1.17.106)", "mypy-boto3-support (==1.17.106)", "mypy-boto3-swf (==1.17.106)", "mypy-boto3-synthetics (==1.17.106)", "mypy-boto3-textract (==1.17.106)", "mypy-boto3-timestream-query (==1.17.106)", "mypy-boto3-timestream-write (==1.17.106)", "mypy-boto3-transcribe (==1.17.106)", "mypy-boto3-transfer (==1.17.106)", "mypy-boto3-translate (==1.17.106)", "mypy-boto3-waf (==1.17.106)", "mypy-boto3-waf-regional (==1.17.106)", "mypy-boto3-wafv2 (==1.17.106)", "mypy-boto3-wellarchitected (==1.17.106)", "mypy-boto3-workdocs (==1.17.106)", "mypy-boto3-worklink (==1.17.106)", "mypy-boto3-workmail (==1.17.106)", "mypy-boto3-workmailmessageflow (==1.17.106)", "mypy-boto3-workspaces (==1.17.106)", "mypy-boto3-xray (==1.17.106)"] +amp = ["mypy-boto3-amp (==1.17.106)"] +amplify = ["mypy-boto3-amplify (==1.17.106)"] +amplifybackend = ["mypy-boto3-amplifybackend (==1.17.106)"] +apigateway = ["mypy-boto3-apigateway (==1.17.106)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (==1.17.106)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (==1.17.106)"] +appconfig = ["mypy-boto3-appconfig (==1.17.106)"] +appflow = ["mypy-boto3-appflow (==1.17.106)"] +appintegrations = ["mypy-boto3-appintegrations (==1.17.106)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (==1.17.106)"] +application-insights = ["mypy-boto3-application-insights (==1.17.106)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (==1.17.106)"] +appmesh = ["mypy-boto3-appmesh (==1.17.106)"] +apprunner = ["mypy-boto3-apprunner (==1.17.106)"] +appstream = ["mypy-boto3-appstream (==1.17.106)"] +appsync = ["mypy-boto3-appsync (==1.17.106)"] +athena = ["mypy-boto3-athena (==1.17.106)"] +auditmanager = 
["mypy-boto3-auditmanager (==1.17.106)"] +autoscaling = ["mypy-boto3-autoscaling (==1.17.106)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (==1.17.106)"] +backup = ["mypy-boto3-backup (==1.17.106)"] +batch = ["mypy-boto3-batch (==1.17.106)"] +braket = ["mypy-boto3-braket (==1.17.106)"] +budgets = ["mypy-boto3-budgets (==1.17.106)"] +ce = ["mypy-boto3-ce (==1.17.106)"] +chime = ["mypy-boto3-chime (==1.17.106)"] +cloud9 = ["mypy-boto3-cloud9 (==1.17.106)"] +clouddirectory = ["mypy-boto3-clouddirectory (==1.17.106)"] +cloudformation = ["mypy-boto3-cloudformation (==1.17.106)"] +cloudfront = ["mypy-boto3-cloudfront (==1.17.106)"] +cloudhsm = ["mypy-boto3-cloudhsm (==1.17.106)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (==1.17.106)"] +cloudsearch = ["mypy-boto3-cloudsearch (==1.17.106)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (==1.17.106)"] +cloudtrail = ["mypy-boto3-cloudtrail (==1.17.106)"] +cloudwatch = ["mypy-boto3-cloudwatch (==1.17.106)"] +codeartifact = ["mypy-boto3-codeartifact (==1.17.106)"] +codebuild = ["mypy-boto3-codebuild (==1.17.106)"] +codecommit = ["mypy-boto3-codecommit (==1.17.106)"] +codedeploy = ["mypy-boto3-codedeploy (==1.17.106)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (==1.17.106)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (==1.17.106)"] +codepipeline = ["mypy-boto3-codepipeline (==1.17.106)"] +codestar = ["mypy-boto3-codestar (==1.17.106)"] +codestar-connections = ["mypy-boto3-codestar-connections (==1.17.106)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (==1.17.106)"] +cognito-identity = ["mypy-boto3-cognito-identity (==1.17.106)"] +cognito-idp = ["mypy-boto3-cognito-idp (==1.17.106)"] +cognito-sync = ["mypy-boto3-cognito-sync (==1.17.106)"] +comprehend = ["mypy-boto3-comprehend (==1.17.106)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (==1.17.106)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (==1.17.106)"] +config = ["mypy-boto3-config (==1.17.106)"] +connect = ["mypy-boto3-connect (==1.17.106)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (==1.17.106)"] +connectparticipant = ["mypy-boto3-connectparticipant (==1.17.106)"] +cur = ["mypy-boto3-cur (==1.17.106)"] +customer-profiles = ["mypy-boto3-customer-profiles (==1.17.106)"] +databrew = ["mypy-boto3-databrew (==1.17.106)"] +dataexchange = ["mypy-boto3-dataexchange (==1.17.106)"] +datapipeline = ["mypy-boto3-datapipeline (==1.17.106)"] +datasync = ["mypy-boto3-datasync (==1.17.106)"] +dax = ["mypy-boto3-dax (==1.17.106)"] +detective = ["mypy-boto3-detective (==1.17.106)"] +devicefarm = ["mypy-boto3-devicefarm (==1.17.106)"] +devops-guru = ["mypy-boto3-devops-guru (==1.17.106)"] +directconnect = ["mypy-boto3-directconnect (==1.17.106)"] +discovery = ["mypy-boto3-discovery (==1.17.106)"] +dlm = ["mypy-boto3-dlm (==1.17.106)"] +dms = ["mypy-boto3-dms (==1.17.106)"] +docdb = ["mypy-boto3-docdb (==1.17.106)"] +ds = ["mypy-boto3-ds (==1.17.106)"] +dynamodb = ["mypy-boto3-dynamodb (==1.17.106)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (==1.17.106)"] +ebs = ["mypy-boto3-ebs (==1.17.106)"] +ec2 = ["mypy-boto3-ec2 (==1.17.106)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (==1.17.106)"] +ecr = ["mypy-boto3-ecr (==1.17.106)"] +ecr-public = ["mypy-boto3-ecr-public (==1.17.106)"] +ecs = ["mypy-boto3-ecs (==1.17.106)"] +efs = ["mypy-boto3-efs (==1.17.106)"] +eks = ["mypy-boto3-eks (==1.17.106)"] +elastic-inference = ["mypy-boto3-elastic-inference (==1.17.106)"] +elasticache = ["mypy-boto3-elasticache (==1.17.106)"] 
+elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (==1.17.106)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (==1.17.106)"] +elb = ["mypy-boto3-elb (==1.17.106)"] +elbv2 = ["mypy-boto3-elbv2 (==1.17.106)"] +emr = ["mypy-boto3-emr (==1.17.106)"] +emr-containers = ["mypy-boto3-emr-containers (==1.17.106)"] +es = ["mypy-boto3-es (==1.17.106)"] +essential = ["mypy-boto3-cloudformation (==1.17.106)", "mypy-boto3-dynamodb (==1.17.106)", "mypy-boto3-ec2 (==1.17.106)", "mypy-boto3-lambda (==1.17.106)", "mypy-boto3-rds (==1.17.106)", "mypy-boto3-s3 (==1.17.106)", "mypy-boto3-sqs (==1.17.106)"] +events = ["mypy-boto3-events (==1.17.106)"] +finspace = ["mypy-boto3-finspace (==1.17.106)"] +finspace-data = ["mypy-boto3-finspace-data (==1.17.106)"] +firehose = ["mypy-boto3-firehose (==1.17.106)"] +fis = ["mypy-boto3-fis (==1.17.106)"] +fms = ["mypy-boto3-fms (==1.17.106)"] +forecast = ["mypy-boto3-forecast (==1.17.106)"] +forecastquery = ["mypy-boto3-forecastquery (==1.17.106)"] +frauddetector = ["mypy-boto3-frauddetector (==1.17.106)"] +fsx = ["mypy-boto3-fsx (==1.17.106)"] +gamelift = ["mypy-boto3-gamelift (==1.17.106)"] +glacier = ["mypy-boto3-glacier (==1.17.106)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (==1.17.106)"] +glue = ["mypy-boto3-glue (==1.17.106)"] +greengrass = ["mypy-boto3-greengrass (==1.17.106)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (==1.17.106)"] +groundstation = ["mypy-boto3-groundstation (==1.17.106)"] +guardduty = ["mypy-boto3-guardduty (==1.17.106)"] +health = ["mypy-boto3-health (==1.17.106)"] +healthlake = ["mypy-boto3-healthlake (==1.17.106)"] +honeycode = ["mypy-boto3-honeycode (==1.17.106)"] +iam = ["mypy-boto3-iam (==1.17.106)"] +identitystore = ["mypy-boto3-identitystore (==1.17.106)"] +imagebuilder = ["mypy-boto3-imagebuilder (==1.17.106)"] +importexport = ["mypy-boto3-importexport (==1.17.106)"] +inspector = ["mypy-boto3-inspector (==1.17.106)"] +iot = ["mypy-boto3-iot (==1.17.106)"] +iot-data = ["mypy-boto3-iot-data (==1.17.106)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (==1.17.106)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (==1.17.106)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (==1.17.106)"] +iotanalytics = ["mypy-boto3-iotanalytics (==1.17.106)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (==1.17.106)"] +iotevents = ["mypy-boto3-iotevents (==1.17.106)"] +iotevents-data = ["mypy-boto3-iotevents-data (==1.17.106)"] +iotfleethub = ["mypy-boto3-iotfleethub (==1.17.106)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (==1.17.106)"] +iotsitewise = ["mypy-boto3-iotsitewise (==1.17.106)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (==1.17.106)"] +iotwireless = ["mypy-boto3-iotwireless (==1.17.106)"] +ivs = ["mypy-boto3-ivs (==1.17.106)"] +kafka = ["mypy-boto3-kafka (==1.17.106)"] +kendra = ["mypy-boto3-kendra (==1.17.106)"] +kinesis = ["mypy-boto3-kinesis (==1.17.106)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (==1.17.106)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (==1.17.106)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (==1.17.106)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (==1.17.106)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (==1.17.106)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (==1.17.106)"] +kms = ["mypy-boto3-kms (==1.17.106)"] +lakeformation = ["mypy-boto3-lakeformation (==1.17.106)"] +lambda = ["mypy-boto3-lambda (==1.17.106)"] +lex-models = ["mypy-boto3-lex-models (==1.17.106)"] +lex-runtime = 
["mypy-boto3-lex-runtime (==1.17.106)"] +lexv2-models = ["mypy-boto3-lexv2-models (==1.17.106)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (==1.17.106)"] +license-manager = ["mypy-boto3-license-manager (==1.17.106)"] +lightsail = ["mypy-boto3-lightsail (==1.17.106)"] +location = ["mypy-boto3-location (==1.17.106)"] +logs = ["mypy-boto3-logs (==1.17.106)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (==1.17.106)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (==1.17.106)"] +lookoutvision = ["mypy-boto3-lookoutvision (==1.17.106)"] +machinelearning = ["mypy-boto3-machinelearning (==1.17.106)"] +macie = ["mypy-boto3-macie (==1.17.106)"] +macie2 = ["mypy-boto3-macie2 (==1.17.106)"] +managedblockchain = ["mypy-boto3-managedblockchain (==1.17.106)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (==1.17.106)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (==1.17.106)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (==1.17.106)"] +mediaconnect = ["mypy-boto3-mediaconnect (==1.17.106)"] +mediaconvert = ["mypy-boto3-mediaconvert (==1.17.106)"] +medialive = ["mypy-boto3-medialive (==1.17.106)"] +mediapackage = ["mypy-boto3-mediapackage (==1.17.106)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (==1.17.106)"] +mediastore = ["mypy-boto3-mediastore (==1.17.106)"] +mediastore-data = ["mypy-boto3-mediastore-data (==1.17.106)"] +mediatailor = ["mypy-boto3-mediatailor (==1.17.106)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (==1.17.106)"] +mgh = ["mypy-boto3-mgh (==1.17.106)"] +mgn = ["mypy-boto3-mgn (==1.17.106)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (==1.17.106)"] +mobile = ["mypy-boto3-mobile (==1.17.106)"] +mq = ["mypy-boto3-mq (==1.17.106)"] +mturk = ["mypy-boto3-mturk (==1.17.106)"] +mwaa = ["mypy-boto3-mwaa (==1.17.106)"] +neptune = ["mypy-boto3-neptune (==1.17.106)"] +network-firewall = ["mypy-boto3-network-firewall (==1.17.106)"] +networkmanager = ["mypy-boto3-networkmanager (==1.17.106)"] +nimble = ["mypy-boto3-nimble (==1.17.106)"] +opsworks = ["mypy-boto3-opsworks (==1.17.106)"] +opsworkscm = ["mypy-boto3-opsworkscm (==1.17.106)"] +organizations = ["mypy-boto3-organizations (==1.17.106)"] +outposts = ["mypy-boto3-outposts (==1.17.106)"] +personalize = ["mypy-boto3-personalize (==1.17.106)"] +personalize-events = ["mypy-boto3-personalize-events (==1.17.106)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (==1.17.106)"] +pi = ["mypy-boto3-pi (==1.17.106)"] +pinpoint = ["mypy-boto3-pinpoint (==1.17.106)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (==1.17.106)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (==1.17.106)"] +polly = ["mypy-boto3-polly (==1.17.106)"] +pricing = ["mypy-boto3-pricing (==1.17.106)"] +proton = ["mypy-boto3-proton (==1.17.106)"] +qldb = ["mypy-boto3-qldb (==1.17.106)"] +qldb-session = ["mypy-boto3-qldb-session (==1.17.106)"] +quicksight = ["mypy-boto3-quicksight (==1.17.106)"] +ram = ["mypy-boto3-ram (==1.17.106)"] +rds = ["mypy-boto3-rds (==1.17.106)"] +rds-data = ["mypy-boto3-rds-data (==1.17.106)"] +redshift = ["mypy-boto3-redshift (==1.17.106)"] +redshift-data = ["mypy-boto3-redshift-data (==1.17.106)"] +rekognition = ["mypy-boto3-rekognition (==1.17.106)"] +resource-groups = ["mypy-boto3-resource-groups (==1.17.106)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (==1.17.106)"] +robomaker = ["mypy-boto3-robomaker (==1.17.106)"] +route53 = ["mypy-boto3-route53 (==1.17.106)"] +route53domains = ["mypy-boto3-route53domains 
(==1.17.106)"] +route53resolver = ["mypy-boto3-route53resolver (==1.17.106)"] +s3 = ["mypy-boto3-s3 (==1.17.106)"] +s3control = ["mypy-boto3-s3control (==1.17.106)"] +s3outposts = ["mypy-boto3-s3outposts (==1.17.106)"] +sagemaker = ["mypy-boto3-sagemaker (==1.17.106)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (==1.17.106)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (==1.17.106)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (==1.17.106)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (==1.17.106)"] +savingsplans = ["mypy-boto3-savingsplans (==1.17.106)"] +schemas = ["mypy-boto3-schemas (==1.17.106)"] +sdb = ["mypy-boto3-sdb (==1.17.106)"] +secretsmanager = ["mypy-boto3-secretsmanager (==1.17.106)"] +securityhub = ["mypy-boto3-securityhub (==1.17.106)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (==1.17.106)"] +service-quotas = ["mypy-boto3-service-quotas (==1.17.106)"] +servicecatalog = ["mypy-boto3-servicecatalog (==1.17.106)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (==1.17.106)"] +servicediscovery = ["mypy-boto3-servicediscovery (==1.17.106)"] +ses = ["mypy-boto3-ses (==1.17.106)"] +sesv2 = ["mypy-boto3-sesv2 (==1.17.106)"] +shield = ["mypy-boto3-shield (==1.17.106)"] +signer = ["mypy-boto3-signer (==1.17.106)"] +sms = ["mypy-boto3-sms (==1.17.106)"] +sms-voice = ["mypy-boto3-sms-voice (==1.17.106)"] +snowball = ["mypy-boto3-snowball (==1.17.106)"] +sns = ["mypy-boto3-sns (==1.17.106)"] +sqs = ["mypy-boto3-sqs (==1.17.106)"] +ssm = ["mypy-boto3-ssm (==1.17.106)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (==1.17.106)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (==1.17.106)"] +sso = ["mypy-boto3-sso (==1.17.106)"] +sso-admin = ["mypy-boto3-sso-admin (==1.17.106)"] +sso-oidc = ["mypy-boto3-sso-oidc (==1.17.106)"] +stepfunctions = ["mypy-boto3-stepfunctions (==1.17.106)"] +storagegateway = ["mypy-boto3-storagegateway (==1.17.106)"] +sts = ["mypy-boto3-sts (==1.17.106)"] +support = ["mypy-boto3-support (==1.17.106)"] +swf = ["mypy-boto3-swf (==1.17.106)"] +synthetics = ["mypy-boto3-synthetics (==1.17.106)"] +textract = ["mypy-boto3-textract (==1.17.106)"] +timestream-query = ["mypy-boto3-timestream-query (==1.17.106)"] +timestream-write = ["mypy-boto3-timestream-write (==1.17.106)"] +transcribe = ["mypy-boto3-transcribe (==1.17.106)"] +transfer = ["mypy-boto3-transfer (==1.17.106)"] +translate = ["mypy-boto3-translate (==1.17.106)"] +waf = ["mypy-boto3-waf (==1.17.106)"] +waf-regional = ["mypy-boto3-waf-regional (==1.17.106)"] +wafv2 = ["mypy-boto3-wafv2 (==1.17.106)"] +wellarchitected = ["mypy-boto3-wellarchitected (==1.17.106)"] +workdocs = ["mypy-boto3-workdocs (==1.17.106)"] +worklink = ["mypy-boto3-worklink (==1.17.106)"] +workmail = ["mypy-boto3-workmail (==1.17.106)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (==1.17.106)"] +workspaces = ["mypy-boto3-workspaces (==1.17.106)"] +xray = ["mypy-boto3-xray (==1.17.106)"] [[package]] name = "botocore" -version = "1.20.103" +version = "1.20.106" description = "Low-level, data-driven core of boto 3." 
category = "main" optional = false @@ -408,8 +408,8 @@ crt = ["awscrt (==0.11.24)"] [[package]] name = "botocore-stubs" -version = "1.20.103.post1" -description = "Type annotations for botocore 1.20.103, generated by mypy-boto3-buider 4.22.1" +version = "1.20.106" +description = "Type annotations for botocore 1.20.106, generated by mypy-boto3-buider 4.22.1" category = "dev" optional = false python-versions = ">=3.6" @@ -605,7 +605,7 @@ xlrd = ">=1.0.0,<2.0.0" [[package]] name = "dcicutils" -version = "1.15.0" +version = "1.19.0" description = "Utility package for interacting with the 4DN Data Portal and other 4DN resources" category = "main" optional = false @@ -866,7 +866,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.6.0" +version = "4.6.1" description = "Read metadata from Python packages" category = "dev" optional = false @@ -1155,7 +1155,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pip-licenses" -version = "3.4.0" +version = "3.5.1" description = "Dump the software license list of Python packages installed with pip." category = "dev" optional = false @@ -1868,12 +1868,15 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tqdm" -version = "4.61.1" +version = "4.61.2" description = "Fast, Extensible Progress Meter" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [package.extras] dev = ["py-make (>=0.1.0)", "twine", "wheel"] notebook = ["ipywidgets (>=6)"] @@ -1956,6 +1959,17 @@ python-versions = "*" docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] testing = ["nose", "coverage"] +[[package]] +name = "watchtower" +version = "1.0.6" +description = "Python CloudWatch Logging" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +boto3 = ">=1.9.253,<2" + [[package]] name = "webob" version = "1.8.7" @@ -2044,7 +2058,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "zipp" -version = "3.4.1" +version = "3.5.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false @@ -2052,7 +2066,7 @@ python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] name = "zope.deprecation" @@ -2098,7 +2112,7 @@ test = ["zope.testing"] [metadata] lock-version = "1.1" python-versions = ">=3.6.1,<3.7" -content-hash = "3409cb8782747ef072e7e9550ff1984d603279fd4dc7cdbc9404da05029f6bb2" +content-hash = "02e53edb0cec30a770bb6fc35bd91cc5216a7729fe0f236f9b42cd5a9d41d971" [metadata.files] apipkg = [ @@ -2131,7 +2145,7 @@ beautifulsoup4 = [ {file = "beautifulsoup4-4.9.3.tar.gz", hash = "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25"}, ] bitarray = [ - {file = "bitarray-2.1.3.tar.gz", hash = "sha256:a24aff72a7f1b09571b5daf9dbfcffd98481be1fe085ae5ef662cf11452a97e0"}, + {file = "bitarray-2.2.1.tar.gz", hash = 
"sha256:31b40d716a1f0642ea9e2741c29b756299075db2e1d1ebe750e3e2c1469f589d"}, ] boto = [ {file = "boto-2.49.0-py2.py3-none-any.whl", hash = "sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8"}, @@ -2142,16 +2156,16 @@ boto3 = [ {file = "boto3-1.17.53.tar.gz", hash = "sha256:1d26f6e7ae3c940cb07119077ac42485dcf99164350da0ab50d0f5ad345800cd"}, ] boto3-stubs = [ - {file = "boto3-stubs-1.17.103.post1.tar.gz", hash = "sha256:715ce7c1e5e1468d90d6633e2a8847ceea160c628fdcf0f9566e61596fd701d8"}, - {file = "boto3_stubs-1.17.103.post1-py3-none-any.whl", hash = "sha256:9dfc04e7a35ecad684e40e0f3357d485c5db2958fc09e4765fa3e556b3734b1d"}, + {file = "boto3-stubs-1.17.106.tar.gz", hash = "sha256:3cade7f8df4c2d15b9d5c47a9753e19165dab1946e49647b47800c216db2ac03"}, + {file = "boto3_stubs-1.17.106-py3-none-any.whl", hash = "sha256:4973122e8b3d92c10742ac165e08e86965722353f022fab97db41d64b0a18cd2"}, ] botocore = [ - {file = "botocore-1.20.103-py2.py3-none-any.whl", hash = "sha256:5b39773056a94f85e884a658a5126bb4fee957e31d98b69c255b137eb9f11d6b"}, - {file = "botocore-1.20.103.tar.gz", hash = "sha256:afbfe10fcd580224016d652330db21e7d89099181a437c9ec588b5b7cb3ea644"}, + {file = "botocore-1.20.106-py2.py3-none-any.whl", hash = "sha256:47ec01b20c4bc6aaa16d21f756ead2f437b47c1335b083356cdc874e9140b023"}, + {file = "botocore-1.20.106.tar.gz", hash = "sha256:6d5c983808b1d00437f56d0c08412bd82d9f8012fdb77e555f97277a1fd4d5df"}, ] botocore-stubs = [ - {file = "botocore-stubs-1.20.103.post1.tar.gz", hash = "sha256:d66554becc1e44572e197f769883214382d79cf24d5fa3e6670fddf13f3506a4"}, - {file = "botocore_stubs-1.20.103.post1-py3-none-any.whl", hash = "sha256:52b605546ba546c9c72572a11c0a307a25a65d33c42e03fa20dc97bde8d8c71b"}, + {file = "botocore-stubs-1.20.106.tar.gz", hash = "sha256:a2a51cca978510220b8830bf4578a9e5a0a18b169c50093df19e561d738b05d5"}, + {file = "botocore_stubs-1.20.106-py3-none-any.whl", hash = "sha256:a8d0784c93eedaf076403be159f9bdb433569d03428574b6940c742d7ebd8469"}, ] cached-property = [ {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, @@ -2311,8 +2325,8 @@ dcicsnovault = [ {file = "dcicsnovault-4.8.1.tar.gz", hash = "sha256:e229f54f09eb68a40d90172cefc0fecc580890867b6b32e63817e3ca59525a91"}, ] dcicutils = [ - {file = "dcicutils-1.15.0-py3-none-any.whl", hash = "sha256:1ad2ce8cdae57e42fee5506669af24dc2c181a3188dd07743aba93a87024d04c"}, - {file = "dcicutils-1.15.0.tar.gz", hash = "sha256:e073cbaee5df83996fafd095620473bad9b31f1be8105a58d7ed39a0077877ee"}, + {file = "dcicutils-1.19.0-py3-none-any.whl", hash = "sha256:af6e2b13ea92857140fe98a18a06bfe41e7fdd1c4c065e0f74368aed5b2983c4"}, + {file = "dcicutils-1.19.0.tar.gz", hash = "sha256:883bbaa442a6c410fe1278cc39d2b0b0d808be2e14cb55e9adc1751e3bd232f1"}, ] docker = [ {file = "docker-4.4.4-py2.py3-none-any.whl", hash = "sha256:f3607d5695be025fa405a12aca2e5df702a57db63790c73b927eb6a94aac60af"}, @@ -2405,8 +2419,8 @@ idna = [ {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.6.0-py3-none-any.whl", hash = "sha256:c6513572926a96458f8c8f725bf0e00108fba0c9583ade9bd15b869c9d726e33"}, - {file = "importlib_metadata-4.6.0.tar.gz", hash = "sha256:4a5611fea3768d3d967c447ab4e93f567d95db92225b43b7b238dbfb855d70bb"}, + {file = "importlib_metadata-4.6.1-py3-none-any.whl", hash = "sha256:9f55f560e116f8643ecf2922d9cd3e1c7e8d52e683178fecd9d08f6aa357e11e"}, + {file = 
"importlib_metadata-4.6.1.tar.gz", hash = "sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac"}, ] importlib-resources = [ {file = "importlib_resources-5.2.0-py3-none-any.whl", hash = "sha256:a0143290bef3cbc99de9e40176e4987780939a955b8632f02ce6c935f42e9bfc"}, @@ -2611,8 +2625,8 @@ pillow = [ {file = "Pillow-6.2.2.tar.gz", hash = "sha256:db9ff0c251ed066d367f53b64827cc9e18ccea001b986d08c265e53625dab950"}, ] pip-licenses = [ - {file = "pip-licenses-3.4.0.tar.gz", hash = "sha256:c5e48b312bdd296154daaf04f24f473715a3c77b2c359f3737377b9fb31aaf8c"}, - {file = "pip_licenses-3.4.0-py3-none-any.whl", hash = "sha256:bdebcc46c5972a5dc7ee0ef5c6cf6a1bc5d63d7466e23325fc4090b33cc58c00"}, + {file = "pip-licenses-3.5.1.tar.gz", hash = "sha256:6c60096cfa1ee04d7db8d374d4326939369d5871368421e7a7d5da026519bc24"}, + {file = "pip_licenses-3.5.1-py3-none-any.whl", hash = "sha256:e44e4e92e6af2b9d11dfafbf22a3052bf9a05fdaa5654befdba9db735832ac7f"}, ] pipdeptree = [ {file = "pipdeptree-2.0.0-py2-none-any.whl", hash = "sha256:6899ba160bc7db98f0124d1aa6a680aa578adbac8558177ae66dd81bf69369de"}, @@ -3011,8 +3025,8 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tqdm = [ - {file = "tqdm-4.61.1-py2.py3-none-any.whl", hash = "sha256:aa0c29f03f298951ac6318f7c8ce584e48fa22ec26396e6411e43d038243bdb2"}, - {file = "tqdm-4.61.1.tar.gz", hash = "sha256:24be966933e942be5f074c29755a95b315c69a91f839a29139bf26ffffe2d3fd"}, + {file = "tqdm-4.61.2-py2.py3-none-any.whl", hash = "sha256:5aa445ea0ad8b16d82b15ab342de6b195a722d75fc1ef9934a46bba6feafbc64"}, + {file = "tqdm-4.61.2.tar.gz", hash = "sha256:8bb94db0d4468fea27d004a0f1d1c02da3cdedc00fe491c0de986b76a04d6b0a"}, ] transaction = [ {file = "transaction-2.4.0-py2.py3-none-any.whl", hash = "sha256:b96a5e9aaa73f905759bc9ccf0021bf4864c01ac36666e0d28395e871f6d584a"}, @@ -3042,6 +3056,10 @@ waitress = [ {file = "waitress-1.2.0-py2.py3-none-any.whl", hash = "sha256:8b8c8686f628a635b9747e3014a0ab19cf9cf95c5c36eb3331ae355a462ee602"}, {file = "waitress-1.2.0.tar.gz", hash = "sha256:e624c829656ffc99b33d661072b2814885ae92835cf835ee8ab283ddb7c915b9"}, ] +watchtower = [ + {file = "watchtower-1.0.6-py3-none-any.whl", hash = "sha256:2859275df4ad71b005b983613dd64cabbda61f9fdd3db7600753fc465090119d"}, + {file = "watchtower-1.0.6.tar.gz", hash = "sha256:5eb5d78e730e1016e166b14a79a02d1b939cf1a58f2d559ff4f7c6f953284ebf"}, +] webob = [ {file = "WebOb-1.8.7-py2.py3-none-any.whl", hash = "sha256:73aae30359291c14fa3b956f8b5ca31960e420c28c1bec002547fb04928cf89b"}, {file = "WebOb-1.8.7.tar.gz", hash = "sha256:b64ef5141be559cfade448f044fa45c2260351edcb6a8ef6b7e00c7dcef0c323"}, @@ -3073,8 +3091,8 @@ xmltodict = [ {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, + {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, ] "zope.deprecation" = [ {file = "zope.deprecation-4.4.0-py2.py3-none-any.whl", hash = "sha256:f1480b74995958b24ce37b0ef04d3663d2683e5d6debc96726eff18acf4ea113"}, diff --git a/pyproject.toml b/pyproject.toml index 
1edc49b704..17849ebf65 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] # Note: Various modules refer to this system as "encoded", not "cgap-portal". name = "encoded" -version = "6.8.11" +version = "7.0.0" description = "Clinical Genomics Analysis Platform" authors = ["4DN-DCIC Team "] license = "MIT" @@ -37,13 +37,13 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.6.1,<3.7" boto3 = "^1.17.32" -botocore = "^1.20.32" +botocore = "^1.20.86" certifi = ">=2020.11.8" chardet = "3.0.4" colorama = "0.3.3" dcicpyvcf = "1.0.0" dcicsnovault = "^4.8.1" -dcicutils = "1.15.0" +dcicutils = "^1.19.0" elasticsearch = "6.8.1" execnet = "1.4.1" future = "^0.15.2" @@ -114,6 +114,7 @@ WSGIProxy2 = "0.4.2" sentry-sdk = "^0.16.5" granite-suite = "0.1.11b0" openpyxl = "^3.0.7" +watchtower = "^1.0.6" [tool.poetry.dev-dependencies] # PyCharm says boto3-stubs contains useful type hints @@ -160,7 +161,6 @@ pytest-xdist = ">=1.14" # snovault commands batchupgrade = "snovault.batchupgrade:main" create-mapping = "snovault.elasticsearch.create_mapping:main" -es-index-listener = "snovault.elasticsearch.es_index_listener:main" wipe-test-indices = "snovault.commands.wipe_test_indices:main" # encoded commands add-date-created = "encoded.commands.add_date_created:main" @@ -170,7 +170,7 @@ configure-kibana-index = "encoded.commands.configure_kibana_index:main" create-mapping-on-deploy = "encoded.commands.create_mapping_on_deploy:main" dev-servers = "encoded.dev_servers:main" dis2pheno = "encoded.commands.parse_hpoa:main" -es-index-data = "encoded.commands.es_index_data:main" +es-index-data = "snovault.commands.es_index_data:main" export-data = "encoded.commands.export_data:main" extract-test-data = "encoded.commands.extract_test_data:main" import-data = "encoded.commands.import_data:main" diff --git a/scripts/build-docker-test b/scripts/build-docker-test new file mode 100755 index 0000000000..3b58bc9550 --- /dev/null +++ b/scripts/build-docker-test @@ -0,0 +1,61 @@ +#!/bin/bash -f + +aws_account= +env_name= +do_login= +do_help= +creds_file=$HOME/.aws_test/test_creds.sh + +if [ -f "${creds_file}" ]; then + # Default the values of these variables by peeking in the test_creds.sh script + aws_account=`grep 'export ACCOUNT_NUMBER=' ${creds_file} | sed -E 's|^export ACCOUNT_NUMBER=(.*)$|\1|'` + env_name=`grep 'export ENV_NAME=' ${creds_file} | sed -E 's|^export ENV_NAME=(.*)$|\1|'` +fi + +while [ $# -gt 0 ]; do + if [ "$1" = "--aws_account" ]; then + aws_account=$2 + shift 2 + elif [ "$1" = "--env_name" ]; then + env_name=$2 + shift 2 + elif [ "$1" = "--login" ]; then + do_login=TRUE + shift 1 + elif [ "$1" = "--help" ]; then + do_help=TRUE + shift 1 + else + do_help=TRUE + break + fi +done + +if [ -n "${do_help}" ]; then + echo "Syntax: $0 { --aws_account | --env_name | --login | --help }" + echo "" + echo " This will execute 'make build-docker-production AWS_ACCOUNT= ENV_NAME='." + echo " If --login is given, 'make ecr-login AWS_ACCOUNT=' will be done first." + echo " If unspecified, defaults to '${aws_account}' (from 'export ACCOUNT_NUMBER=...' in ${creds_file})." + echo " If unspecified, defaults to '${env_name}' (from 'export ENV_NAME=...' in ${creds_file}.)" + if [ ! -f "${creds_file}" ]; then + echo " NOTE: The file ${creds_file} does not exist." 
+ fi + echo "" + exit 1 +fi + +if [ -z "${aws_account}" ]; then + echo "--aws_account was not given to $0 and could not be found in ~/.aws_test/test_creds.sh" + exit 1 +fi + +if [ -z "${env_name}" ]; then + echo "--env_name was not given to $0 and could not be found in ~/.aws_test/test_creds.sh." + exit 1 +fi + +if [ -n "${do_login}" ]; then + make ecr-login AWS_ACCOUNT="${aws_account}" +fi +make build-docker-production AWS_ACCOUNT="${aws_account}" ENV_NAME="${env_name}" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index ad769b40ac..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,16 +0,0 @@ -[flake8] -max-line-length = 160 -[coverage:run] -branch = True -omit = - */*env/* - */site-packages/* - */.cache/* - */.git/* - */.idea/* - */*.egg-info/* - */encode_schemas/* - */encode_types/* - */tests/* - */docs/* - */commands/* diff --git a/setup_eb.py b/setup_eb.py index 86cdfa45c5..b67be46168 100644 --- a/setup_eb.py +++ b/setup_eb.py @@ -65,9 +65,8 @@ def entry_points(): PACKAGE_NAME = POETRY_DATA['name'] README = open(os.path.join(ROOT_DIR, 'README.rst')).read() -CHANGES = open(os.path.join(ROOT_DIR, 'CHANGES.rst')).read() DESCRIPTION = POETRY_DATA['description'] -LONG_DESCRIPTION = README + '\n\n' + CHANGES +LONG_DESCRIPTION = README AUTHOR, AUTHOR_EMAIL = author_and_email(POETRY_DATA['authors'][0]) URL = 'http://data.4dnucleome.org' LICENSE = 'MIT' diff --git a/src/encoded/__init__.py b/src/encoded/__init__.py index 4d7956c364..6c87443915 100644 --- a/src/encoded/__init__.py +++ b/src/encoded/__init__.py @@ -31,10 +31,6 @@ raise EnvironmentError("The CGAP encoded library no longer supports Python 2.") -# location of environment variables on elasticbeanstalk -BEANSTALK_ENV_PATH = "/opt/python/current/env" - - def static_resources(config): mimetypes.init() mimetypes.init([pkg_resources.resource_filename('encoded', 'static/mime.types')]) @@ -95,18 +91,7 @@ def app_version(config): if not config.registry.settings.get(APP_VERSION_REGISTRY_KEY): # we update version as part of deployment process `deploy_beanstalk.py` # but if we didn't check env then git - version = os.environ.get("ENCODED_VERSION") - if not version: - try: - version = subprocess.check_output( - ['git', '-C', os.path.dirname(__file__), 'describe']).decode('utf-8').strip() - diff = subprocess.check_output( - ['git', '-C', os.path.dirname(__file__), 'diff', '--no-ext-diff']) - if diff: - version += '-patch' + hashlib.sha1(diff).hexdigest()[:7] - except Exception: - version = "test" - + version = os.environ.get("ENCODED_VERSION", "test") config.registry.settings[APP_VERSION_REGISTRY_KEY] = version # Fourfront does GA stuff here that makes no sense in CGAP (yet). 
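Editor's note: the app_version() simplification just above drops the `git describe` fallback entirely, since a production container image is built without a git checkout and the version must instead be injected through the environment. A minimal sketch of the resulting behavior (the helper name below is illustrative, not part of this changeset):

import os

def resolve_app_version(default="test"):
    # Mirrors the simplified app_version() logic: read the version that the
    # build/deploy process exports as ENCODED_VERSION, falling back to a
    # placeholder instead of shelling out to `git describe`.
    return os.environ.get("ENCODED_VERSION", default)

# A deployment exporting ENCODED_VERSION=7.0.0 would report "7.0.0";
# a bare test environment reports "test".
print(resolve_app_version())
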
diff --git a/src/encoded/authentication.py b/src/encoded/authentication.py index f1c189c224..753bb957de 100644 --- a/src/encoded/authentication.py +++ b/src/encoded/authentication.py @@ -2,6 +2,7 @@ import os from operator import itemgetter import jwt +import datetime from base64 import b64decode from passlib.context import CryptContext @@ -34,6 +35,7 @@ CONNECTION, COLLECTIONS ) +from dateutil.parser import isoparse from dcicutils.misc_utils import remove_element from snovault.validation import ValidationFailure from snovault.calculated import calculate_properties @@ -251,9 +253,9 @@ def get_token_info(token, request): request.set_property(lambda r: False, 'auth0_expired') return payload - else: # we don't have the key, let auth0 do the work for us + else: # we don't have the key, let auth0 do the work for us user_url = "https://{domain}/tokeninfo".format(domain='hms-dbmi.auth0.com') - resp = requests.post(user_url, {'id_token':token}) + resp = requests.post(user_url, {'id_token':token}) payload = resp.json() if 'email' in payload and Auth0AuthenticationPolicy.email_is_partners_or_hms(payload): request.set_property(lambda r: False, 'auth0_expired') @@ -261,8 +263,8 @@ def get_token_info(token, request): except (ValueError, jwt.exceptions.InvalidTokenError, jwt.exceptions.InvalidKeyError) as e: # Catch errors from decoding JWT - print('Invalid JWT assertion : %s (%s)', (e, type(e).__name__)) - request.set_property(lambda r: True, 'auth0_expired') # Allow us to return 403 code &or unset cookie in renderers.py + print('Invalid JWT assertion : %s (%s)' % (e, type(e).__name__)) + request.set_property(lambda r: True, 'auth0_expired') # Allow us to return 403 code &or unset cookie in renderers.py return None print("didn't get email or email is not verified") @@ -313,8 +315,7 @@ def login(context, request): if request_token is None: request_token = request.json_body.get("id_token", None) - - is_https = request.scheme == "https" + is_https = (request.scheme == "https") request.response.set_cookie( "jwtToken", @@ -438,6 +439,10 @@ def session_properties(context, request): def basic_auth_check(username, password, request): + """ This function implements the functionality that does the actual checking of the + access key against what is in the database. It is thus very important. 
Access + key expiration is enforced here - auth will fail if the key has expired + """ # We may get called before the context is found and the root set root = request.registry[ROOT] collection = root['access-keys'] @@ -446,19 +451,24 @@ def basic_auth_check(username, password, request): except KeyError: return None + # Check expiration first + # Note that access keys generated a while ago will remain valid (for now) - will 6/14/21 properties = access_key.properties - hash = properties['secret_access_key_hash'] + expiration_date = properties.get('expiration_date') + if expiration_date: + dt = isoparse(expiration_date) # datetime.date.fromisoformat in Python3.7 + now = datetime.datetime.utcnow() + if now > dt: + return None + # If the expiration is valid, check the hash + hash = properties['secret_access_key_hash'] crypt_context = request.registry[CRYPT_CONTEXT] valid = crypt_context.verify(password, hash) if not valid: return None - #valid, new_hash = crypt_context.verify_and_update(password, hash) - #if new_hash: - # replace_user_hash(user, new_hash) - - return [] + return [] # success @view_config(route_name='impersonate-user', request_method='POST', diff --git a/src/encoded/commands/load_access_keys.py b/src/encoded/commands/load_access_keys.py index 3d365718ba..b34ba2faf1 100644 --- a/src/encoded/commands/load_access_keys.py +++ b/src/encoded/commands/load_access_keys.py @@ -8,6 +8,8 @@ from webtest import AppError from dcicutils.misc_utils import TestApp from dcicutils.beanstalk_utils import get_beanstalk_real_url +from dcicutils.cloudformation_utils import get_ecs_real_url +from dcicutils.secrets_utils import assume_identity log = structlog.getLogger(__name__) EPILOG = __doc__ @@ -44,7 +46,7 @@ def get_existing_key_ids(testapp, user_uuid, key_desc): def generate_access_key(testapp, env, user_uuid, description): """ - Generate an access key for given user on given environment. + Generate an access key for the given user on the given environment.
Args: testapp (webtest.TestApp): current TestApp @@ -55,7 +57,12 @@ def generate_access_key(testapp, env, user_uuid, description): Returns: dict: access key contents with server """ - server = get_beanstalk_real_url(env) + try: + server = get_ecs_real_url(env) # try to grab from Cfn, if we are ECS env + except Exception: + server = get_beanstalk_real_url(env) + if not server: + server = get_beanstalk_real_url(env) access_key_req = {'user': user_uuid, 'description': description} res = testapp.post_json('/access_key', access_key_req).json return {'secret': res['secret_access_key'], @@ -84,6 +91,8 @@ def main(): ) parser.add_argument('config_uri', help='path to configfile') parser.add_argument('--app-name', help='Pyramid app name in configfile') + parser.add_argument('--secret-name', help='name of application identity stored in secrets manager within which' + ' to locate S3_ENCRYPT_KEY, for example: dev/beanstalk/cgap-dev') args = parser.parse_args() app = get_app(args.config_uri, args.app_name) @@ -97,9 +106,16 @@ def main(): if not env: raise RuntimeError('load_access_keys: cannot find env.name in settings') - encrypt_key = os.environ.get('S3_ENCRYPT_KEY') + # Resolve secret from environment if one is not specified + encrypt_key = None + if args.secret_name is not None: + identity = assume_identity() # automatically detects GLOBAL_APPLICATION_CONFIGURATION + encrypt_key = identity.get('S3_ENCRYPT_KEY', None) # one of the secrets + if not encrypt_key: + encrypt_key = os.environ.get('S3_ENCRYPT_KEY') + if not encrypt_key: - raise RuntimeError('load_access_keys: must define S3_ENCRYPT_KEY in env') + raise RuntimeError('load_access_keys: must define S3_ENCRYPT_KEY in env or in GAC') # will need to use a dynamic region at some point (not just here) s3 = boto3.client('s3', region_name='us-east-1') @@ -111,7 +127,7 @@ def main(): ('foursight.app@gmail.com', 'access_key_foursight')] for email, key_name in to_generate: try: - user_props = testapp.get('/users/%s?datastore=database' % (email)).follow().json + user_props = testapp.get('/users/%s?datastore=database' % email).follow().json except Exception as exc: log.error('load_access_keys: could not get user %s. Exception: %s' % (email, exc)) continue diff --git a/src/encoded/commands/update_inserts_from_server.py b/src/encoded/commands/update_inserts_from_server.py index e6c8622c8e..64a41bfa65 100644 --- a/src/encoded/commands/update_inserts_from_server.py +++ b/src/encoded/commands/update_inserts_from_server.py @@ -163,6 +163,5 @@ def main(): (len(svr_inserts[item_type]), item_type + '.json')) - if __name__ == "__main__": main() diff --git a/src/encoded/ingestion/queue_utils.py b/src/encoded/ingestion/queue_utils.py index c48bd9e3f8..4a22c64bf1 100644 --- a/src/encoded/ingestion/queue_utils.py +++ b/src/encoded/ingestion/queue_utils.py @@ -16,13 +16,15 @@ class IngestionQueueManager: class and QueueManager should be refactored into a "helper" class, but for now this is sufficient and is tested independently here. - We will use a single queue to keep track of VCF File uuids to be indexed. + We will use a single queue to keep track of File uuids to be indexed. This used to manage only VCFs + but now the ingestion functionality is generic and can be extended to arbitrary processing on + any type.
""" - BUCKET_EXTENSION = '-vcfs' + BUCKET_EXTENSION = '-ingestion-queue' # XXX: breaking change, matches 4dn-cloud-infra resources def __init__(self, registry, override_name=None): """ Does initial setup for interacting with SQS """ - self.batch_size = 10 + self.batch_size = 1 # NOTE: this value is important because we don't want to block other jobs self.env_name = registry.settings.get('env.name', None) if not self.env_name: # replace with something usable backup = socket.gethostname()[:80].replace('.', '-') diff --git a/src/encoded/ingestion/variant_utils.py b/src/encoded/ingestion/variant_utils.py index 069ff5d5d5..7b09a8a823 100644 --- a/src/encoded/ingestion/variant_utils.py +++ b/src/encoded/ingestion/variant_utils.py @@ -101,6 +101,7 @@ def _post_or_patch_variant_sample(self, variant_sample, variant_uuid): except Exception as e: # noqa exceptions thrown by the above call are not reported correctly log.info('Exception encountered on variant_sample post (attempting patch): %s' % e) self.vapp.patch_json('/variant_sample/%s' % + build_variant_sample_annotation_id(variant_sample['CALL_INFO'], variant_uuid, self.file), variant_sample, diff --git a/src/encoded/ingestion/vcf_utils.py b/src/encoded/ingestion/vcf_utils.py index 28f59e5f2c..25086cc9d9 100644 --- a/src/encoded/ingestion/vcf_utils.py +++ b/src/encoded/ingestion/vcf_utils.py @@ -391,7 +391,7 @@ def cast_field_value(self, t, value, sub_type=None): else: raise VCFParserException('Got array with no sub-type') else: - raise VCFParserException('Type was %s and not one of: string, integer, number, boolean, array' % type) + raise VCFParserException('Type was %s and not one of: string, integer, number, boolean, array' % t) def validate_variant_value(self, field, value, key='', exit_on_validation=False): """ Given a field, check the variant schema for the type of that field and cast diff --git a/src/encoded/ingestion_listener.py b/src/encoded/ingestion_listener.py index 3b6a976813..4bea1d000b 100644 --- a/src/encoded/ingestion_listener.py +++ b/src/encoded/ingestion_listener.py @@ -580,7 +580,8 @@ def discard(msg): # report results in error_log regardless of status msg = variant_builder.ingestion_report.brief_summary() log.error(msg) - self.update_status(msg=msg) + if self.update_status is not None and callable(self.update_status): + self.update_status(msg=msg) # if we had no errors, patch the file status to 'Ingested' if error > 0: diff --git a/src/encoded/loadxl.py b/src/encoded/loadxl.py index 602d7ee4da..1a40a156a6 100644 --- a/src/encoded/loadxl.py +++ b/src/encoded/loadxl.py @@ -7,6 +7,7 @@ import os import structlog import webtest +import traceback from base64 import b64encode from dcicutils.misc_utils import ignored @@ -464,6 +465,7 @@ def load_all_gen(testapp, inserts, docsdir, overwrite=True, itype=None, from_jso except Exception as e: print('Patching {} failed. 
Patch body:\n{}\n\nError Message:\n{}'.format( a_type, str(an_item), str(e))) + print('Full error: %s' % traceback.format_exc()) e_str = str(e).replace('\n', '') # import pdb; pdb.set_trace() yield str.encode('ERROR: %s\n' % e_str) diff --git a/src/encoded/schemas/access_key.json b/src/encoded/schemas/access_key.json index ee00229f35..123ddd74c7 100644 --- a/src/encoded/schemas/access_key.json +++ b/src/encoded/schemas/access_key.json @@ -53,6 +53,12 @@ "title": "Secret access key Hash", "comment": "Only admins are allowed to set this value.", "type": "string" + }, + "expiration_date": { + "title": "Expiration Date", + "comment": "Only admins are allowed to set this value.", + "type": "string", + "permission": "restricted_fields" } }, "facets": { diff --git a/src/encoded/tests/conftest.py b/src/encoded/tests/conftest.py index 398c112371..791daab4d4 100644 --- a/src/encoded/tests/conftest.py +++ b/src/encoded/tests/conftest.py @@ -13,7 +13,7 @@ from dcicutils.qa_utils import notice_pytest_fixtures, MockFileSystem from pyramid.request import apply_request_extensions -from pyramid.testing import DummyRequest # , setUp, tearDown +from pyramid.testing import DummyRequest from pyramid.threadlocal import get_current_registry, manager as threadlocal_manager from snovault import DBSESSION, ROOT, UPGRADER from snovault.elasticsearch import ELASTIC_SEARCH, create_mapping @@ -42,7 +42,6 @@ def app_settings(request, wsgi_server_host_port, conn, DBSession): # noQA - We notice_pytest_fixtures(request, wsgi_server_host_port, conn, DBSession) settings = make_app_settings_dictionary() settings['auth0.audiences'] = 'http://%s:%s' % wsgi_server_host_port - # add some here for file testing settings[DBSESSION] = DBSession return settings diff --git a/src/encoded/tests/data/master-inserts/higlass_view_config.json b/src/encoded/tests/data/master-inserts/higlass_view_config.json deleted file mode 100644 index d16dfd052a..0000000000 --- a/src/encoded/tests/data/master-inserts/higlass_view_config.json +++ /dev/null @@ -1,860 +0,0 @@ -[ - { - "viewconfig":{ - "editable":true, - "zoomFixed":false, - "exportViewUrl":"/api/v1/viewconfs", - "trackSourceServers": ["https://cgap-higlass.com/api/v1"], - "views":[ - { - "autocompleteSource":"/api/v1/suggest/?d=OHJakQICQD6gTD7skx4EWA&", - "genomePositionSearchBox":{ - "autocompleteServer":"https://cgap-higlass.com/api/v1", - "autocompleteId":"P0PLbQMwTYGy-5uPIQid7A", - "chromInfoServer":"https://cgap-higlass.com/api/v1", - "chromInfoId":"hg38", - "visible":true - }, - "chromInfoPath":"//s3.amazonaws.com/pkerp/data/hg38/chromSizes.tsv", - "tracks": { - "top": [ - { - "type": "combined", - "uid": "FkGY-Yv9T8avNXljXklukw", - "height": 55, - "width": 568, - "contents": [ - { - "filetype": "beddb", - "server": "https://cgap-higlass.com/api/v1", - "tilesetUid": "gene_annotation_hg38", - "uid": "FocNIVsfRVWMRzfmHfncsQ", - "type": "gene-annotations", - "options": { - "fontSize": 10, - "labelColor": "black", - "labelBackgroundColor": "#ffffff", - "labelPosition": "hidden", - "labelLeftMargin": 0, - "labelRightMargin": 0, - "labelTopMargin": 0, - "labelBottomMargin": 0, - "minHeight": 24, - "plusStrandColor": "#8a8ccf", - "minusStrandColor": "#e8727a", - "trackBorderWidth": 0, - "trackBorderColor": "black", - "showMousePosition": false, - "mousePositionColor": "#000000", - "geneAnnotationHeight": 12, - "geneLabelPosition": "outside", - "geneStrandSpacing": 2, - "name": "Gene Annotations (hg38)" - }, - "width": 568, - "height": 55 - }, - { - "uid": "d6KeVfkNSmq_YNj_rWJgBA", - "type":
"viewport-projection-horizontal", - "fromViewUid": "ab", - "options": { - "projectionFillColor": "#777", - "projectionStrokeColor": "#777", - "projectionFillOpacity": 0.3, - "projectionStrokeOpacity": 0.7, - "strokeWidth": 1 - }, - "width": 568, - "height": 55 - } - ], - "options": {} - } - ], - "left": [], - "center": [], - "right": [], - "bottom": [], - "whole": [], - "gallery": [] - }, - "initialXDomain": [ - 594954043.6728096, - 2673883060.323411 - ], - "initialYDomain": [ - -2681117714.2684245, - -2681117686.505896 - ], - "layout": { - "w": 12, - "h": 2, - "x": 0, - "y": 0 - }, - "uid": "aa" - }, - { - "uid":"ab", - "initialXDomain":[ - 594954043.6728096, - 2673883060.323411 - ], - "genomePositionSearchBox":{ - "autocompleteServer":"https://cgap-higlass.com/api/v1", - "autocompleteId":"P0PLbQMwTYGy-5uPIQid7A", - "chromInfoServer":"https://cgap-higlass.com/api/v1", - "chromInfoId":"hg38", - "visible":true - }, - "chromInfoPath":"//s3.amazonaws.com/pkerp/data/hg38/chromSizes.tsv", - "tracks":{ - "top":[ - { - "filetype":"chromsizes-tsv", - "server":"https://cgap-higlass.com/api/v1", - "tilesetUid":"chromsizes_hg38", - "uid":"AdlJsUYFRzuJRZyYeKDX2A", - "type":"chromosome-labels", - "options":{ - "color":"#808080", - "stroke":"#ffffff", - "fontSize":12, - "fontIsLeftAligned":false, - "showMousePosition":false, - "mousePositionColor":"#000000" - }, - "width":811, - "height":30 - }, - { - "uid":"fastaex", - "type":"horizontal-sequence", - "server": "https://cgap-higlass.com/api/v1", - "data":{ - "type":"fasta", - "fastaUrl":"https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa", - "faiUrl":"https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa.fai", - "chromSizesUrl":"https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.chrom.sizes" - }, - "options":{ - "colorAggregationMode":"none", - "labelPosition":"topLeft", - "labelColor":"black", - "labelTextOpacity":0.4, - "valueScaling":"linear", - "trackBorderWidth":0, - "trackBorderColor":"white", - "name":"hg38", - "backgroundColor":"white", - "barBorder":true, - "barBorderColor":"white", - "sortLargestOnTop":true, - "extendedPreloading":false, - "colorScale": [ - "#08519c", - "#6baed6", - "#993404", - "#fe9929", - "#808080", - "#DCDCDC" - ] - }, - "width":768, - "height":25 - }, - { - "uid": "emptytrack_transcripts", - "type": "empty", - "options": {}, - "width": 568, - "height": 10 - }, - { - "uid": "texttrack_transcripts", - "type": "text", - "server": "https://cgap-higlass.com/api/v1", - "options": { - "backgroundColor": "#ededed", - "textColor": "#333333", - "fontSize": 11, - "fontFamily": "Arial", - "fontWeight": "normal", - "offsetY": 4, - "align": "left", - "text": "Transcripts" - }, - "width": 568, - "height": 20 - }, - { - "server": "https://cgap-higlass.com/api/v1", - "tilesetUid": "transcripts_hg38", - "uid": "transcript_annotation", - "type": "horizontal-transcripts", - "options": { - "fontSize": 9, - "labelFontColor": "#222222", - "labelBackgroundPlusStrandColor": "#e9e9e9", - "labelBackgroundMinusStrandColor": "#e9e9e9", - "minHeight": 24, - "plusStrandColor": "#bdbfff", - "minusStrandColor": "#fabec2", - "utrColor": "#C0EAAF", - "mousePositionColor": "#000000", - "transcriptHeight": 12, - "transcriptSpacing": 2, - "name": "Gene transcripts", - "fontFamily": "Helvetica", - "maxTexts": 100, - "showToggleTranscriptsButton": true, - "trackHeightAdjustment": "automatic", - "startCollapsed": false, - "sequenceData": { - "type": "fasta", - "fastaUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa", - 
"faiUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa.fai", - "chromSizesUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.chrom.sizes" - } - }, - "width": 768, - "height": 134 - }, - { - "uid": "emptytrack_clinvar", - "type": "empty", - "options": {}, - "width": 568, - "height": 10 - }, - { - "uid": "texttrack_clinvar", - "type": "text", - "server": "https://cgap-higlass.com/api/v1", - "options": { - "backgroundColor": "#ededed", - "textColor": "#333333", - "fontSize": 11, - "fontFamily": "Arial", - "fontWeight": "normal", - "offsetY": 4, - "align": "left", - "text": "ClinVar variants" - }, - "width": 568, - "height": 20 - }, - { - "type": "horizontal-clinvar", - "height": 110, - "tilesetUid": "clinvar_20200824_hg38", - "server": "https://cgap-higlass.com/api/v1", - "uid": "clinvar_20200824_v5", - "options": { - "name": "Clinvar" - } - }, - { - "uid": "emptytrack_orthologs", - "type": "empty", - "options": {}, - "width": 568, - "height": 10 - }, - { - "uid": "texttrack_orthologs", - "type": "text", - "server": "https://cgap-higlass.com/api/v1", - "options": { - "backgroundColor": "#ededed", - "textColor": "#333333", - "fontSize": 11, - "fontFamily": "Arial", - "fontWeight": "normal", - "offsetY": 4, - "align": "left", - "text": "Orthologs" - }, - "width": 568, - "height": 20 - }, - { - "uid": "emptytrack_orthologs2", - "type": "empty", - "options": {}, - "width": 568, - "height": 7 - }, - { - "type": "horizontal-orthologs", - "height": 100, - "tilesetUid": "orthologs_transcripts_hg38", - "server": "https://cgap-higlass.com/api/v1", - "uid": "orthologs", - "options": { - "rowHeight": 11, - "name": "Orthologs", - "aminoAcidColor": "#333333", - "aminoAcidColorNoMatch": "#b0b0b0", - "fontSize": 10, - "fontFamily": "Arial", - "gapsColor": "#eb9c00", - "labelTextColor": "#888888", - "minusStrandColor1": "#ffe0e2", - "minusStrandColor2": "#fff0f1", - "minusStrandColorZoomedOut": "#fabec2", - "plusStrandColor1": "#ebebff", - "plusStrandColor2": "#dedeff", - "plusStrandColorZoomedOut": "#bdbfff", - "rowSpacing": 2, - "species": [ - "human", - "macaca_mulatta", - "mouse", - "dog", - "elephant", - "chicken", - "zebrafish" - ] - } - }, - { - "uid": "emptytrack_gnomad", - "type": "empty", - "options": {}, - "width": 568, - "height": 10 - }, - { - "uid": "texttrack_gnomad", - "type": "text", - "server": "https://cgap-higlass.com/api/v1", - "options": { - "backgroundColor": "#ededed", - "textColor": "#333333", - "fontSize": 11, - "fontFamily": "Arial", - "fontWeight": "normal", - "offsetY": 4, - "align": "left", - "text": "GnomAD (allele frequencies)" - }, - "width": 568, - "height": 20 - }, - { - "uid": "emptytrack_gnomad2", - "type": "empty", - "options": {}, - "width": 568, - "height": 7 - }, - { - "filetype": "bigwig", - "server": "https://cgap-higlass.com/api/v1", - "tilesetUid": "gnomad_coverage", - "uid": "dGGE208qQNmBcvlBnfpFuA", - "type": "bar", - "options": { - "align": "bottom", - "labelPosition": "topLeft", - "labelLeftMargin": 0, - "labelRightMargin": 0, - "labelTopMargin": 0, - "labelBottomMargin": 0, - "labelShowResolution": false, - "labelShowAssembly": false, - "axisLabelFormatting": "scientific", - "axisPositionHorizontal": "right", - "barFillColor": "grey", - "valueScaling": "linear", - "trackBorderWidth": 0, - "trackBorderColor": "black", - "labelTextOpacity": 0.4, - "barOpacity": 1, - "valueScaleMin": 0, - "valueScaleMax": 60, - "name": "GnomAd - median coverage" - }, - "width": 768, - "height": 45 - }, - { - "uid": "emptytrack_gnomad3", - "type": 
"empty", - "options": {}, - "width": 568, - "height": 7 - }, - { - "uid": "gnomad", - "type": "gnomad", - "options": { - "colorScale": [ - [0.3, 0.3, 0.3, 0.6], - [0.6, 0.6, 0.0, 0.7], - [1, 0.0, 0.0, 0.6] - ], - "showMousePosition": false, - "workerScriptLocation": "/static/build/", - "variantHeight": 12 - }, - "data": { - "type": "vcf", - "vcfUrl": "https://cgap-higlass.s3.amazonaws.com/gnomad/gnomad.higlass.v3.1.sites.vcf.gz", - "tbiUrl": "https://cgap-higlass.s3.amazonaws.com/gnomad/gnomad.higlass.v3.1.sites.vcf.gz.tbi", - "chromSizesUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.chrom.sizes" - }, - "width": 768, - "height": 200 - }, - { - "uid": "emptytrack", - "type": "empty", - "options": {}, - "width": 568, - "height": 20 - } - ], - "left":[ - - ], - "center":[ - - ], - "right":[ - - ], - "bottom":[ - - ], - "whole":[ - { - "type": "vertical-rule", - "x": 1000, - "options": { - "color": "lightgrey" - }, - "uid": "vr1", - "width": 20, - "height": 20 - }, - { - "type": "vertical-rule", - "x": 1001, - "options": { - "color": "lightgrey" - }, - "uid": "vr2", - "width": 20, - "height": 20 - } - ], - "gallery":[ - - ] - }, - "layout":{ - "w":12, - "h":12, - "x":0, - "y":0 - }, - "initialYDomain":[ - 655540254.4718345, - 2044910818.0040488 - ] - } - ], - "zoomLocks":{ - "locksByViewUid":{ - - }, - "locksDict":{ - - } - }, - "locationLocks":{ - "locksByViewUid":{ - - }, - "locksDict":{ - - } - }, - "valueScaleLocks":{ - "locksByViewUid":{ - - }, - "locksDict":{ - - } - } - }, - "name": "higlass-default-viewconf", - "title": "Default CGAP hg38 viewconf", - "genome_assembly": "GRCh38", - "uuid": "00000000-1111-0000-1111-000000000000", - "schema_version": "1" - }, - { - "viewconfig":{ - "views": [ - { - "autocompleteSource":"/api/v1/suggest/?d=OHJakQICQD6gTD7skx4EWA&", - "genomePositionSearchBox":{ - "autocompleteServer":"https://cgap-higlass.com/api/v1", - "autocompleteId":"P0PLbQMwTYGy-5uPIQid7A", - "chromInfoServer":"https://cgap-higlass.com/api/v1", - "chromInfoId":"hg38", - "visible":true - }, - "chromInfoPath":"//s3.amazonaws.com/pkerp/data/hg38/chromSizes.tsv", - "tracks": { - "top": [ - { - "type": "combined", - "uid": "FkGY-Yv9T8avNXljXklukw", - "height": 55, - "width": 568, - "contents": [ - { - "filetype": "beddb", - "server": "https://cgap-higlass.com/api/v1", - "tilesetUid": "gene_annotation_hg38", - "uid": "FocNIVsfRVWMRzfmHfncsQ", - "type": "gene-annotations", - "options": { - "fontSize": 10, - "labelColor": "black", - "labelBackgroundColor": "#ffffff", - "labelPosition": "hidden", - "labelLeftMargin": 0, - "labelRightMargin": 0, - "labelTopMargin": 0, - "labelBottomMargin": 0, - "minHeight": 24, - "plusStrandColor": "#8a8ccf", - "minusStrandColor": "#e8727a", - "trackBorderWidth": 0, - "trackBorderColor": "black", - "showMousePosition": false, - "mousePositionColor": "#000000", - "geneAnnotationHeight": 12, - "geneLabelPosition": "outside", - "geneStrandSpacing": 2, - "name": "Gene Annotations (hg38)" - }, - "width": 568, - "height": 55 - }, - { - "uid": "d6KeVfkNSmq_YNj_rWJgBA", - "type": "viewport-projection-horizontal", - "fromViewUid": "ab", - "options": { - "projectionFillColor": "#777", - "projectionStrokeColor": "#777", - "projectionFillOpacity": 0.3, - "projectionStrokeOpacity": 0.7, - "strokeWidth": 1 - }, - "width": 568, - "height": 55 - } - ], - "options": {} - } - ], - "left": [], - "center": [], - "right": [], - "bottom": [], - "whole": [], - "gallery": [] - }, - "initialXDomain": [ - 594954043.6728096, - 2673883060.323411 - ], - 
"initialYDomain": [ - -2681117714.2684245, - -2681117686.505896 - ], - "layout": { - "w": 12, - "h": 2, - "x": 0, - "y": 0 - }, - "uid": "aa" - }, - { - "uid": "ab", - "layout": { - "h": 12, - "w": 12, - "x": 0, - "y": 0 - }, - "autocompleteSource":"/api/v1/suggest/?d=OHJakQICQD6gTD7skx4EWA&", - "genomePositionSearchBox":{ - "autocompleteServer":"https://cgap-higlass.com/api/v1", - "autocompleteId":"P0PLbQMwTYGy-5uPIQid7A", - "chromInfoServer":"https://cgap-higlass.com/api/v1", - "chromInfoId":"hg38", - "visible":true - }, - "chromInfoPath":"//s3.amazonaws.com/pkerp/data/hg38/chromSizes.tsv", - "tracks": { - "top": [ - { - "filetype":"chromsizes-tsv", - "server":"https://cgap-higlass.com/api/v1", - "tilesetUid":"chromsizes_hg38", - "uid":"AdlJsUYFRzuJRZyYeKDX2A", - "type":"chromosome-labels", - "options":{ - "color":"#808080", - "stroke":"#ffffff", - "fontSize":12, - "fontIsLeftAligned":false, - "showMousePosition":false, - "mousePositionColor":"#000000" - }, - "width":811, - "height":30 - }, - { - "uid": "fastaex", - "data": { - "type": "fasta", - "faiUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa.fai", - "fastaUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa", - "chromSizesUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.chrom.sizes" - }, - "type": "horizontal-sequence", - "width": 768, - "height": 25, - "options": { - "name": "hg38", - "barBorder": true, - "colorScale": [ - "#08519c", - "#6baed6", - "#993404", - "#fe9929", - "#808080", - "#DCDCDC" - ], - "labelColor": "black", - "valueScaling": "linear", - "labelPosition": "topLeft", - "barBorderColor": "white", - "backgroundColor": "white", - "labelTextOpacity": 0.4, - "sortLargestOnTop": true, - "trackBorderColor": "white", - "trackBorderWidth": 0, - "extendedPreloading": false, - "colorAggregationMode": "none", - "notificationText": "Zoom in to see nucleotides...", - "fontSize": 16, - "fontFamily": "Arial", - "fontColor": "white", - "textOption": { - "fontSize": "32px", - "fontFamily": "Arial", - "fill": 16777215, - "fontWeight": "bold" - } - } - }, - { - "uid": "emptytrack_text_transcripts", - "type": "empty", - "options": {}, - "width": 568, - "height": 10 - }, - { - "uid": "texttrack_transcripts", - "type": "text", - "server": "https://cgap-higlass.com/api/v1", - "options": { - "backgroundColor": "#ededed", - "textColor": "#333333", - "fontSize": 11, - "fontFamily": "Arial", - "fontWeight": "normal", - "offsetY": 4, - "align": "left", - "text": "Canonical transcripts" - }, - "width": 568, - "height": 20 - }, - { - "uid": "emptytrack_transcripts", - "type": "empty", - "options": {}, - "width": 568, - "height": 10 - }, - { - "server": "https://cgap-higlass.com/api/v1", - "tilesetUid": "canonical_transcripts_hg38", - "uid": "transcript_annotation2", - "type": "horizontal-transcripts", - "options": { - "fontSize": 9, - "labelFontColor": "#222222", - "labelBackgroundPlusStrandColor": "#e9e9e9", - "labelBackgroundMinusStrandColor": "#e9e9e9", - "minHeight": 24, - "plusStrandColor": "#bdbfff", - "minusStrandColor": "#fabec2", - "utrColor": "#C0EAAF", - "mousePositionColor": "#000000", - "transcriptHeight": 12, - "transcriptSpacing": 2, - "name": "Gene transcripts", - "fontFamily": "Helvetica", - "maxTexts": 100, - "showToggleTranscriptsButton": false, - "startCollapsed": false, - "sequenceData": { - "type": "fasta", - "fastaUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa", - "faiUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.fa.fai", - 
"chromSizesUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.chrom.sizes" - } - }, - "width": 768, - "height": 40 - }, - { - "uid": "emptytrack_a", - "type": "empty", - "options": {}, - "width": 470, - "height": 10 - }, - { - "uid": "texttrack", - "type": "text", - "server": "https://cgap-higlass.com/api/v1", - "options": { - "backgroundColor": "#ededed", - "textColor": "#333333", - "fontSize": 12, - "fontFamily": "Arial", - "fontWeight": "bold", - "offsetY": 4, - "align": "left", - "text": "Proband" - }, - "width": 568, - "height": 23 - }, - { - "uid": "emptytrack_b", - "type": "empty", - "options": {}, - "width": 470, - "height": 5 - }, - { - "type": "pileup", - "options": { - "axisPositionHorizontal": "right", - "axisLabelFormatting": "normal", - "showCoverage": true, - "outlineReadOnHover": true, - "groupBy": "strand", - "minusStrandColor": "#ffd1d4", - "plusStrandColor": "#cfd0ff", - "colorScale": [ - "#08519c", - "#6baed6", - "#993404", - "#fe9929", - "#808080", - "#DCDCDC" - ], - "workerScriptLocation": "/static/build/" - }, - "height": 200, - "uid": "pileup", - "data": { - "type": "bam", - "chromSizesUrl": "https://aveit.s3.amazonaws.com/higlass/data/sequence/hg38.chrom.sizes" - }, - "width": 470 - } - ], - "left": [], - "right": [], - "whole": [ - { - "x": 1558492997, - "uid": "vr1", - "type": "vertical-rule", - "width": 20, - "height": 20, - "options": { - "color": "lightgrey" - } - }, - { - "x": 1558492998, - "uid": "vr2", - "type": "vertical-rule", - "width": 20, - "height": 20, - "options": { - "color": "lightgrey" - } - } - ], - "bottom": [], - "center": [], - "gallery": [] - }, - "initialXDomain": [ - 1069795.099783097, - 1070508.6726894341 - ], - "initialYDomain": [ - 1350225534.8565273, - 1350225534.8565273 - ] - } - ], - "editable": true, - "zoomFixed": false, - "zoomLocks": { - "locksByViewUid": {}, - "locksDict": {} - }, - "exportViewUrl": "/api/v1/viewconfs", - "locationLocks": { - "locksByViewUid": {}, - "locksDict": {} - }, - "valueScaleLocks": { - "locksByViewUid": {}, - "locksDict": {} - }, - "trackSourceServers": [ - "https://cgap-higlass.com/api/v1" - ] - }, - "name": "higlass-bam-viewconf", - "title": "Default CGAP BAM viewconf", - "genome_assembly": "GRCh38", - "uuid": "9146eeba-ebb8-41aa-93a8-ada8efaff64b", - "schema_version": "1" - } -] diff --git a/src/encoded/tests/test_fixtures.py b/src/encoded/tests/test_fixtures.py index dffc431ef4..e32818ff7d 100644 --- a/src/encoded/tests/test_fixtures.py +++ b/src/encoded/tests/test_fixtures.py @@ -97,7 +97,7 @@ def test_fixtures2(minitestdata2, testapp): assert len(items) == 1 -def test_order_complete(app, conn): +def test_order_complete(app): order = ORDER + ['access_key'] environ = { 'HTTP_ACCEPT': 'application/json', diff --git a/src/encoded/tests/test_ingestion_listener.py b/src/encoded/tests/test_ingestion_listener.py index 7fe653ffeb..96db15b885 100644 --- a/src/encoded/tests/test_ingestion_listener.py +++ b/src/encoded/tests/test_ingestion_listener.py @@ -227,7 +227,6 @@ def mocked_should_remain_online(override=None): def test_test_port(): - from snovault.tests.test_postgresql_fixture import SNOVAULT_DB_TEST_PORT assert SNOVAULT_DB_TEST_PORT == 5440 diff --git a/src/encoded/tests/test_load_access_key.py b/src/encoded/tests/test_load_access_key.py index 5d727b9a54..e935ca84b7 100644 --- a/src/encoded/tests/test_load_access_key.py +++ b/src/encoded/tests/test_load_access_key.py @@ -7,9 +7,8 @@ pytestmark = [pytest.mark.setone, pytest.mark.working] -# TODO: test 
load_access_keys.get_existing_key_ids, which would use ES - - +# TODO: Re-enable once running ECS in production +@pytest.mark.skip def test_gen_access_keys(testapp, admin): with mock.patch.object(load_access_keys, 'get_beanstalk_real_url') as mocked_url: mocked_url.return_value = 'http://fourfront-hotseat' diff --git a/src/encoded/tests/test_post_put_patch.py b/src/encoded/tests/test_post_put_patch.py index b5609cb6d1..9549d49ac8 100644 --- a/src/encoded/tests/test_post_put_patch.py +++ b/src/encoded/tests/test_post_put_patch.py @@ -248,7 +248,7 @@ def test_post_check_only_invalid_data(testapp, disorder_data): ''' note theese test should work on any object ''' - disorder_data['taxon_id'] = 24; + disorder_data['taxon_id'] = 24 testapp.post_json('/disorder/?check_only=true', disorder_data, status=422) @@ -327,7 +327,7 @@ def test_patch_delete_fields_restricted_fields_admin(link_targets, testapp): res = testapp.post_json(COLLECTION_URL, item_with_link[0], status=201) url = res.location assert res.json['@graph'][0]['protected_link'] - res = testapp.patch_json(url + "?delete_fields=protected_link", {}, status=200) + testapp.patch_json(url + "?delete_fields=protected_link", {}, status=200) def test_patch_delete_fields_restricted_fields_submitter(content, testapp, submitter_testapp): @@ -344,10 +344,14 @@ def test_patch_delete_fields_restricted_fields_submitter(content, testapp, submi res1 = submitter_testapp.patch_json(url + "?delete_fields=protected", {}, status=200) assert res1.json['@graph'][0]['protected'] == 'protected default' - # change protected value + # submitter cannot change value + submitter_testapp.patch_json(url, {'protected': 'protected new'}, status=422) + + # admin can change protected value res = testapp.patch_json(url, {'protected': 'protected new'}, status=200) assert res.json['@graph'][0]['protected'] == 'protected new' + # results in a delta in the protected field, reject res2 = submitter_testapp.patch_json(url + "?delete_fields=protected", {}, status=422) res_errors = res2.json['errors'] assert len(res_errors) == 2 diff --git a/src/encoded/types/access_key.py b/src/encoded/types/access_key.py index db55a1abfa..5fe8e01ee1 100644 --- a/src/encoded/types/access_key.py +++ b/src/encoded/types/access_key.py @@ -8,6 +8,7 @@ Everyone, ) from pyramid.settings import asbool +import datetime from .base import ( Item, DELETED_ACL, @@ -48,7 +49,7 @@ ]) class AccessKey(Item): """AccessKey class.""" - + ACCESS_KEY_EXPIRATION_TIME = 90 # days item_type = 'access_key' schema = load_schema('encoded:schemas/access_key.json') name_key = 'access_key_id' @@ -59,6 +60,13 @@ class AccessKey(Item): 'deleted': DELETED_ACL, } + @classmethod + def create(cls, registry, uuid, properties, sheets=None): + """ Sets the access key timeout 90 days from creation. 
""" + properties['expiration_date'] = (datetime.datetime.utcnow() + datetime.timedelta( + days=cls.ACCESS_KEY_EXPIRATION_TIME)).isoformat() + return super().create(registry, uuid, properties, sheets) + def __ac_local_roles__(self): """grab and return user as owner.""" owner = 'userid.%s' % self.properties['user'] @@ -77,6 +85,9 @@ def update(self, properties, sheets=None): new_properties = self.properties.copy() new_properties.update(properties) properties = new_properties + # set new expiration + properties['expiration_date'] = (datetime.datetime.utcnow() + datetime.timedelta( + days=self.ACCESS_KEY_EXPIRATION_TIME)).isoformat() self._update(properties, sheets) class Collection(Item.Collection): diff --git a/src/encoded/types/base.py b/src/encoded/types/base.py index 6873786945..bbf267aea3 100644 --- a/src/encoded/types/base.py +++ b/src/encoded/types/base.py @@ -331,11 +331,6 @@ def is_update_by_admin_user(self): return False def _update(self, properties, sheets=None): - props = {} - try: - props = self.properties - except KeyError: - pass add_last_modified(properties) super(Item, self)._update(properties, sheets) diff --git a/src/encoded/types/family.py b/src/encoded/types/family.py index 021f87c59b..5137e9caa2 100644 --- a/src/encoded/types/family.py +++ b/src/encoded/types/family.py @@ -654,7 +654,7 @@ def process_pedigree(context, request): fam_props = context.upgrade_properties() post_extra = {'project': fam_props['project'], 'institution': fam_props['institution']} - xml_extra = {'ped_datetime': ped_datetime} + xml_extra = {'ped_datetime': ped_datetime.isoformat()} family_uuids = create_family_proband(testapp, xml_data, refs, 'managedObjectID', family_item, post_extra, xml_extra) diff --git a/src/encoded/types/file.py b/src/encoded/types/file.py index ef42f106bb..6ac0a45497 100644 --- a/src/encoded/types/file.py +++ b/src/encoded/types/file.py @@ -19,7 +19,8 @@ from pyramid.threadlocal import get_current_request from pyramid.traversal import resource_path from pyramid.view import view_config -from dcicutils.env_utils import CGAP_ENV_WEBPROD, CGAP_ENV_WOLF +from dcicutils.secrets_utils import assume_identity +from dcicutils.qa_utils import override_environ from snovault import ( AfterModified, BeforeModified, @@ -110,12 +111,22 @@ def external_creds(bucket, key, name=None, profile_name=None): } ] } - # boto.set_stream_logger('boto3') - conn = boto3.client('sts', aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"), - aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY")) + # In the new environment, extract S3 Keys from global application configuration + if 'IDENTITY' in os.environ: + identity = assume_identity() + with override_environ(**identity): + conn = boto3.client('sts', aws_access_key_id=os.environ.get('S3_AWS_ACCESS_KEY_ID'), + aws_secret_access_key=os.environ.get('S3_AWS_SECRET_ACCESS_KEY')) + # In the old account, we are always passing IAM User creds so these will just work + else: + conn = boto3.client('sts', aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID'), + aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY')) token = conn.get_federation_token(Name=name, Policy=json.dumps(policy)) # 'access_key' 'secret_key' 'expiration' 'session_token' credentials = token.get('Credentials') + # Convert Expiration datetime object to string via cast + # Uncaught serialization error picked up by Docker - Will 2/25/2021 + credentials['Expiration'] = str(credentials['Expiration']) credentials.update({ 'upload_url': 's3://{bucket}/{key}'.format(bucket=bucket, 
diff --git a/src/encoded/visualization.py b/src/encoded/visualization.py
index 474bb59202..da34c06f92 100644
--- a/src/encoded/visualization.py
+++ b/src/encoded/visualization.py
@@ -1,3 +1,4 @@
+import os
 from copy import (
     copy,
     deepcopy
@@ -104,7 +105,7 @@ def get_higlass_viewconf(context, request):
         request(obj): Http request object. Assumes request's request is JSON and contains these keys:
             requesting_tab(str) : "annotation" or "bam"
             variant_pos_abs(int) : Center of the viewconf in abs genome coordinates
-
+
     Returns:
         A dictionary.
             success(bool) : Boolean indicating success.
@@ -112,7 +113,7 @@ def get_higlass_viewconf(context, request):
             viewconfig(dict) : Dict representing the new viewconfig.
     """
-    requesting_tab = request.json_body.get('requesting_tab', None)
+    requesting_tab = request.json_body.get('requesting_tab', None)
     requesting_tab = requesting_tab if requesting_tab else "annotation"
 
     viewconf_uuid = "00000000-1111-0000-1111-000000000000" if requesting_tab == "annotation" else "9146eeba-ebb8-41aa-93a8-ada8efaff64b"
@@ -126,10 +127,13 @@ def get_higlass_viewconf(context, request):
             "success" : False,
             "errors": "No view config found.",
             "viewconfig": None
-        }
+        }
 
-    # We need absolute URLs for the BAM adn GnomAD Worker
-    host_url = "http://localhost:6543"
+    # We need absolute URLs for the BAM and GnomAD Worker
+    # XXX: this needs a better workaround - Will June 22 2021
+    host_url = "http://localhost:6543"
+    if 'IDENTITY' in os.environ:  # detect use of global application configuration
+        host_url = "http://c4ecstrialalphacgapmastertest-273357903.us-east-1.elb.amazonaws.com"
     if request.registry.settings.get('env.name') == CGAP_ENV_WEBPROD:
         host_url = CGAP_PUBLIC_URL_PRD
     elif request.registry.settings.get('env.name') == CGAP_ENV_MASTERTEST:
@@ -138,18 +142,18 @@ def get_higlass_viewconf(context, request):
         host_url = f"http://{CGAP_ENV_DEV}.9wzadzju3p.us-east-1.elasticbeanstalk.com"
 
     if requesting_tab == "annotation":
-        variant_pos = request.json_body.get('variant_pos_abs', None)
+        variant_pos = request.json_body.get('variant_pos_abs', None)
         variant_pos = variant_pos if variant_pos else 100000
         window_size_small = 20  # window size for the interpretation space
         window_size_large = 5000  # window size for the overview
 
         # Overview
         higlass_viewconfig['views'][0]['initialXDomain'][0] = variant_pos - window_size_large
-        higlass_viewconfig['views'][0]['initialXDomain'][1] = variant_pos + window_size_large
+        higlass_viewconfig['views'][0]['initialXDomain'][1] = variant_pos + window_size_large
 
         # Details
         higlass_viewconfig['views'][1]['initialXDomain'][0] = variant_pos - window_size_small
-        higlass_viewconfig['views'][1]['initialXDomain'][1] = variant_pos + window_size_small
+        higlass_viewconfig['views'][1]['initialXDomain'][1] = variant_pos + window_size_small
 
         # Vertical rules
         higlass_viewconfig['views'][1]['tracks']['whole'][0]['x'] = variant_pos
@@ -159,18 +163,18 @@ def get_higlass_viewconf(context, request):
         higlass_viewconfig['views'][1]['tracks']['top'][17]['options']['workerScriptLocation'] = host_url + wsl
 
     elif requesting_tab == "bam":
-        variant_pos = request.json_body.get('variant_pos_abs', None)
+        variant_pos = request.json_body.get('variant_pos_abs', None)
         variant_pos = variant_pos if variant_pos else 100000
 
         # This is the id of the variant sample that we are currently looking at.
         # This should be the first file in the Higlass viewconf
-        bam_sample_id = request.json_body.get('bam_sample_id', None)
+        bam_sample_id = request.json_body.get('bam_sample_id', None)
 
         window_size_small = 20  # window size for the interpretation space
         window_size_large = 5000  # window size for the overview
 
         #s3_bucket = request.registry.settings.get('file_wfout_bucket')
         s3_bucket = "elasticbeanstalk-fourfront-cgap-wfoutput"
 
-        samples_pedigree = request.json_body.get('samples_pedigree', None)
+        samples_pedigree = request.json_body.get('samples_pedigree', None)
         samples_pedigree.sort(key=lambda x: x['sample_name'] == bam_sample_id, reverse=True)
 
         top_tracks = higlass_viewconfig['views'][1]['tracks']['top']
@@ -181,7 +185,7 @@ def get_higlass_viewconf(context, request):
 
         # Delete original tracks from the insert, replace them with adjusted data
         # from the sample data. If there is no data, we only show the sequence track
-        del top_tracks[6:10]
+        del top_tracks[6:10]
         # print(json.dumps(top_tracks, indent=2))
 
         for sample in samples_pedigree:
@@ -209,20 +213,20 @@ def get_higlass_viewconf(context, request):
 
         # Show the correct location
         higlass_viewconfig['views'][0]['initialXDomain'][0] = variant_pos - window_size_large
-        higlass_viewconfig['views'][0]['initialXDomain'][1] = variant_pos + window_size_large
+        higlass_viewconfig['views'][0]['initialXDomain'][1] = variant_pos + window_size_large
         higlass_viewconfig['views'][1]['initialXDomain'][0] = variant_pos - window_size_small
-        higlass_viewconfig['views'][1]['initialXDomain'][1] = variant_pos + window_size_small
+        higlass_viewconfig['views'][1]['initialXDomain'][1] = variant_pos + window_size_small
 
         # Vertical rules
         higlass_viewconfig['views'][1]['tracks']['whole'][0]['x'] = variant_pos
         higlass_viewconfig['views'][1]['tracks']['whole'][1]['x'] = variant_pos + 1
-
+
     return {
         "success" : True,
        "errors": "",
         "viewconfig" : higlass_viewconfig
-    }
+    }
 
 def create_presigned_url(bucket_name, object_name, expiration=3600):
     """Generate a presigned URL to share an S3 object
@@ -243,6 +247,5 @@ def create_presigned_url(bucket_name, object_name, expiration=3600):
         return None
 
     # The response contains the presigned URL
-    return response
+    return response
-
\ No newline at end of file
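
Only the tail of `create_presigned_url` is visible in the hunk above. For context, the standard boto3 pattern its docstring describes looks roughly like the sketch below (assuming default credential resolution; not necessarily the exact function body):

    import logging
    import boto3
    from botocore.exceptions import ClientError

    def create_presigned_url(bucket_name, object_name, expiration=3600):
        # Generate a time-limited GET URL for an S3 object
        s3_client = boto3.client('s3')
        try:
            response = s3_client.generate_presigned_url(
                'get_object',
                Params={'Bucket': bucket_name, 'Key': object_name},
                ExpiresIn=expiration)
        except ClientError as e:
            logging.error(e)
            return None
        # The response contains the presigned URL
        return response

Callers receive a time-limited URL, or None on error, which matches the `return None` / `return response` paths visible in the diff.
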
diff --git a/testing.cfg b/testing.cfg
deleted file mode 100644
index 7f802f1dd2..0000000000
--- a/testing.cfg
+++ /dev/null
@@ -1,8 +0,0 @@
-[buildout]
-extends = buildout.cfg
-
-[production-ini]
-create_tables = true
-load_test_data = encoded.loadxl:load_prod_data
-
-
diff --git a/webpack.config.js b/webpack.config.js
index 1f386a9465..92680fd2b9 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -75,7 +75,7 @@ const optimization = {
     minimize: mode === "production",
     minimizer: [
         new TerserPlugin({
-            parallel: false,
+            parallel: false, // XXX: enabling this option causes the Docker build to fail - Will 2/25/2021
             sourceMap: true,
             terserOptions:{
                 compress: true,