Skip to content

Commit

Permalink
Merge pull request #44 from 4dn-dcic/custom_admin_keys
Browse files Browse the repository at this point in the history
args on get_access_keys for secret name and on s3_put_secret for bucket/secret
  • Loading branch information
carlvitzthum authored Sep 1, 2019
2 parents c3f5d9a + 6a42bff commit 5d7937e
Show file tree
Hide file tree
Showing 7 changed files with 49 additions and 34 deletions.
2 changes: 1 addition & 1 deletion dcicutils/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Version information."""

# The following line *must* be the last in the module, exactly as formatted:
__version__ = "0.7.7"
__version__ = "0.8.0"
3 changes: 3 additions & 0 deletions dcicutils/ff_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -596,6 +596,9 @@ def get_schema_names(key=None, ff_env=None):
schema_name = {}
profiles = get_metadata('/profiles/', key=auth, add_on='frame=raw')
for key, value in profiles.items():
# skip abstract types
if value.get('isAbstract') is True:
continue
# some test schemas in local don't have the id field
schema_filename = value.get('id')
if schema_filename:
Expand Down
36 changes: 15 additions & 21 deletions dcicutils/s3_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,15 @@ def __init__(self, outfile_bucket=None, sys_bucket=None, raw_file_bucket=None,
'''
if we pass in env set the outfile and sys bucket from the environment
'''

# avoid circular ref
from dcicutils.beanstalk_utils import get_beanstalk_real_url
self.url = ''
self.s3 = boto3.client('s3', region_name='us-east-1')
# avoid circular ref, import as needed
from dcicutils import beanstalk_utils as bs
if sys_bucket is None:
# staging and production share same buckets
if env:
if 'webprod' in env or env in ['staging', 'stagging', 'data']:
self.url = bs.get_beanstalk_real_url(env)
self.url = get_beanstalk_real_url(env)
env = 'fourfront-webprod'
# we use standardized naming schema, so s3 buckets always have same prefix
sys_bucket = "elasticbeanstalk-%s-system" % env
Expand All @@ -44,10 +43,8 @@ def __init__(self, outfile_bucket=None, sys_bucket=None, raw_file_bucket=None,
self.raw_file_bucket = raw_file_bucket
self.blob_bucket = blob_bucket

def get_access_keys(self):
name = 'illnevertell'
def get_access_keys(self, name='access_key_admin'):
keys = self.get_key(keyfile_name=name)

if isinstance(keys.get('default'), dict):
keys = keys['default']
if self.url:
Expand All @@ -58,20 +55,21 @@ def get_ff_key(self):
return self.get_access_keys()

def get_higlass_key(self):
return self.get_key(keyfile_name='hiwillnevertell')
# higlass key corresponds to Django server super user credentials
return self.get_key(keyfile_name='api_key_higlass')

def get_google_key(self):
return self.get_key(keyfile_name='fourdn-fourfront-google-key')
return self.get_key(keyfile_name='api_key_google')

def get_jupyterhub_key(self):
# the jupyterhub key is a Jupyterhub API token
return self.get_key(keyfile_name='jupyterhub-fourfront-key')
# jupyterhub key is a Jupyterhub API token
return self.get_key(keyfile_name='api_key_jupyterhub')

def get_key(self, keyfile_name='illnevertell'):
def get_key(self, keyfile_name='access_key_admin'):
# Share secret encrypted S3 File
response = self.s3.get_object(Bucket=self.sys_bucket,
Key=keyfile_name,
SSECustomerKey=os.environ.get("SECRET"),
SSECustomerKey=os.environ['S3_ENCRYPT_KEY'],
SSECustomerAlgorithm='AES256')
akey = response['Body'].read()
if type(akey) == bytes:
Expand Down Expand Up @@ -138,22 +136,18 @@ def s3_put(self, obj, upload_key, acl=None):
Key=upload_key,
Body=obj,
ContentType=content_type,
ACL=acl
)
ACL=acl)
else:
return self.s3.put_object(Bucket=self.outfile_bucket,
Key=upload_key,
Body=obj,
ContentType=content_type
)
ContentType=content_type)

def s3_put_secret(self, data, keyname, bucket=None, secret=None):
if not bucket:
bucket = self.sys_bucket
if secret is None:
secret = os.environ.get("SECRET")
if secret is None:
raise RuntimeError("SECRET should be defined in env")
if not secret:
secret = os.environ["S3_ENCRYPT_KEY"]
return self.s3.put_object(Bucket=bucket,
Key=keyname,
Body=data,
Expand Down
4 changes: 4 additions & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,12 @@
pytest
pytest-cov
pytest-mock
pytest-runner
flaky

# used with structlog
colorama

# Build tasks
invoke

Expand Down
3 changes: 1 addition & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ boto3==1.7.42
botocore==1.10.42
elasticsearch==5.5.3
aws_requests_auth==0.4.1
urllib3==1.23
urllib3==1.24.2
structlog==18.1.0
requests==2.20.0
pytest-runner
28 changes: 18 additions & 10 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,35 @@
import os
import io
from setuptools import setup
from os import path

# variables used in buildout
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
this_directory = path.abspath(path.dirname(__file__))
with io.open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()


# we want strict package requirements on install
with open('requirements.txt') as f:
requires = f.read().splitlines()
requires = [req.strip() for req in requires]
requires = [
'boto3>=1.7.42',
'botocore>=1.10.42',
'elasticsearch==5.5.3',
'aws_requests_auth>=0.4.1',
'urllib3>=1.23',
'structlog>=18.1.0',
'requests>=2.20.0'
]

tests_require = [
'pytest',
'pytest-mock',
'pytest-cov',
'flaky'
]

setup(
name='dcicutils',
version=open("dcicutils/_version.py").readlines()[-1].split()[-1].strip("\"'"),
description='Utility modules shared amongst several repos in the 4dn-dcic organization',
long_description=README,
long_description=long_description,
long_description_content_type='text/markdown',
packages=['dcicutils'],
include_package_data=True,
zip_safe=False,
Expand All @@ -30,7 +38,7 @@
url='https://data.4dnucleome.org',
license='MIT',
install_requires=requires,
setup_requires=requires,
setup_requires=['pytest-runner', 'colorama'],
tests_require=tests_require,
extras_require={
'test': tests_require,
Expand Down
7 changes: 7 additions & 0 deletions test/test_s3_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,13 @@ def test_s3Utils_get_keys_for_data():
util = s3Utils(env='data')
keys = util.get_access_keys()
assert keys['server'] == 'https://data.4dnucleome.org'
# make sure we have keys for foursight and tibanna as well
keys_tb = util.get_access_keys('access_key_tibanna')
assert keys_tb['key'] != keys['key']
assert keys_tb['server'] == keys['server']
keys_fs = util.get_access_keys('access_key_foursight')
assert keys_fs['key'] != keys_tb['key'] != keys['key']
assert keys_fs['server'] == keys['server']


def test_s3Utils_get_keys_for_staging():
Expand Down

0 comments on commit 5d7937e

Please sign in to comment.