diff --git a/ESSArch_Core/WorkflowEngine/models.py b/ESSArch_Core/WorkflowEngine/models.py index 87785f502..df80085b1 100644 --- a/ESSArch_Core/WorkflowEngine/models.py +++ b/ESSArch_Core/WorkflowEngine/models.py @@ -604,15 +604,17 @@ def reraise(self): def create_remote_copy(self, session, host): create_remote_task_url = urljoin(host, reverse('processtask-list')) params = copy.deepcopy(self.params) - params.pop('_options', None) + params['storage_object'] = str(params['storage_object']) if params.get('storage_object') is not None else None ip_id = str(self.information_package.pk) if self.information_package.pk is not None else None + responsible_username = self.responsible.username if self.responsible is not None else None data = { 'id': str(self.pk), 'name': self.name, 'args': self.args, - 'params': self.params, + 'params': params, 'eager': self.eager, 'information_package': ip_id, + 'responsible': responsible_username, } r = session.post(create_remote_task_url, json=data, timeout=60) @@ -627,14 +629,16 @@ def create_remote_copy(self, session, host): def update_remote_copy(self, session, host): update_remote_task_url = urljoin(host, reverse('processtask-detail', args=(str(self.pk),))) params = copy.deepcopy(self.params) - params.pop('_options', None) + params['storage_object'] = str(params['storage_object']) if params.get('storage_object') is not None else None ip_id = str(self.information_package.pk) if self.information_package.pk is not None else None + responsible_username = self.responsible.username if self.responsible is not None else None data = { 'name': self.name, 'args': self.args, - 'params': self.params, + 'params': params, 'eager': self.eager, 'information_package': ip_id, + 'responsible': responsible_username, } r = session.patch(update_remote_task_url, json=data, timeout=60) diff --git a/ESSArch_Core/WorkflowEngine/serializers.py b/ESSArch_Core/WorkflowEngine/serializers.py index f7c9bd575..201b13b48 100644 --- 
a/ESSArch_Core/WorkflowEngine/serializers.py +++ b/ESSArch_Core/WorkflowEngine/serializers.py @@ -25,6 +25,7 @@ import uuid from celery import states as celery_states +from django.contrib.auth import get_user_model from rest_framework import serializers from ESSArch_Core.auth.fields import CurrentUsernameDefault @@ -33,6 +34,8 @@ from ESSArch_Core.WorkflowEngine.models import ProcessStep, ProcessTask from ESSArch_Core.WorkflowEngine.util import get_result +User = get_user_model() + class ProcessStepChildrenSerializer(serializers.Serializer): url = serializers.SerializerMethodField() @@ -84,7 +87,7 @@ class ProcessTaskSerializer(serializers.ModelSerializer): args = serializers.JSONField(required=False) params = serializers.SerializerMethodField() responsible = serializers.SlugRelatedField( - slug_field='username', read_only=True + slug_field='username', queryset=User.objects.all(), required=False, ) def get_params(self, obj): diff --git a/ESSArch_Core/celery/backends/database.py b/ESSArch_Core/celery/backends/database.py index 3ee4afdc2..f4ac517af 100644 --- a/ESSArch_Core/celery/backends/database.py +++ b/ESSArch_Core/celery/backends/database.py @@ -101,8 +101,11 @@ def _get_task_meta_for(self, task_id): @classmethod def exception_to_python(cls, exc): - """Convert serialized exception to Python exception.""" + """Convert serialized exception or string to Python exception.""" if exc: + if isinstance(exc, str): + exc = Exception(exc) + if not isinstance(exc, BaseException): exc_module = exc.get('exc_module') if exc_module is None: diff --git a/ESSArch_Core/configuration/serializers.py b/ESSArch_Core/configuration/serializers.py index 833f5abb9..301fc91ca 100644 --- a/ESSArch_Core/configuration/serializers.py +++ b/ESSArch_Core/configuration/serializers.py @@ -34,6 +34,7 @@ Site, StoragePolicy, ) +from ESSArch_Core.exceptions import Conflict from ESSArch_Core.storage.models import ( StorageMethod, StorageMethodTargetRelation, @@ -133,10 +134,80 @@ class Meta: class 
StoragePolicySerializer(serializers.ModelSerializer): - cache_storage = StorageMethodSerializer() + cache_storage = StorageMethodSerializer(allow_null=True) storage_methods = StorageMethodSerializer(many=True) ingest_path = PathSerializer() + def create_storage_method(self, data): + if data is None: + return None + + storage_method_target_set_data = data.pop('storage_method_target_relations') + storage_method, _ = StorageMethod.objects.update_or_create( + id=data['id'], + defaults=data + ) + + for storage_method_target_data in storage_method_target_set_data: + storage_target_data = storage_method_target_data.pop('storage_target') + storage_target_data.pop('remote_server', None) + storage_target, _ = StorageTarget.objects.update_or_create( + id=storage_target_data['id'], + defaults=storage_target_data + ) + storage_method_target_data['storage_method'] = storage_method + storage_method_target_data['storage_target'] = storage_target + storage_method_target, _ = StorageMethodTargetRelation.objects.update_or_create( + id=storage_method_target_data['id'], + defaults=storage_method_target_data + ) + + return storage_method + + def create(self, validated_data): + storage_method_set_data = validated_data.pop('storage_methods') + cache_storage_data = validated_data.pop('cache_storage') + ingest_path_data = validated_data.pop('ingest_path') + + cache_storage = self.create_storage_method(cache_storage_data) + ingest_path, _ = Path.objects.update_or_create(entity=ingest_path_data['entity'], defaults=ingest_path_data) + + validated_data['cache_storage'] = cache_storage + validated_data['ingest_path'] = ingest_path + + policy, _ = StoragePolicy.objects.update_or_create(policy_id=validated_data['policy_id'], + defaults=validated_data) + + for storage_method_data in storage_method_set_data: + storage_method = self.create_storage_method(storage_method_data) + policy.storage_methods.add(storage_method) + # add to policy, dummy + + return policy + + def update(self, instance, 
validated_data): + storage_method_set_data = validated_data.pop('storage_methods') + cache_storage_data = validated_data.pop('cache_storage') + ingest_path_data = validated_data.pop('ingest_path') + + cache_storage = self.create_storage_method(cache_storage_data) + ingest_path, _ = Path.objects.update_or_create(entity=ingest_path_data['entity'], defaults=ingest_path_data) + + validated_data['cache_storage'] = cache_storage + validated_data['ingest_path'] = ingest_path + + for storage_method_data in storage_method_set_data: + storage_method = self.create_storage_method(storage_method_data) + instance.storage_methods.add(storage_method) + + return super().update(instance, validated_data) + + def validate(self, data): + if self.instance is None and StoragePolicy.objects.filter(pk=data.get('id')).exists(): + raise Conflict('Storage policy already exists') + + return data + class Meta: model = StoragePolicy fields = ( @@ -153,6 +224,7 @@ class Meta: ) extra_kwargs = { 'id': { + 'read_only': False, 'validators': [], }, 'policy_id': { diff --git a/ESSArch_Core/frontend/static/frontend/lang/en/import.ts b/ESSArch_Core/frontend/static/frontend/lang/en/import.ts index 5e1a13534..768bd7371 100644 --- a/ESSArch_Core/frontend/static/frontend/lang/en/import.ts +++ b/ESSArch_Core/frontend/static/frontend/lang/en/import.ts @@ -18,6 +18,9 @@ export default ($translateProvider: ng.translate.ITranslateProvider) => { SA_EXISTS_DESC: 'Submission agreement with same ID already exists. Would you like to overwrite it?', SA_IMPORTED: 'Submission agreement "{{name}}" has been imported. \nID: {{id}}', SA_IS_PUBLISHED_CANNOT_BE_OVERWRITTEN: 'Submission Agreement {{name}} is Published and can not be overwritten', + STORAGEPOLICY_EXISTS: 'Storage policy exists', + STORAGEPOLICY_EXISTS_DESC: 'Storage policy with same ID already exists. Would you like to overwrite it?', + STORAGEPOLICY_IMPORTED: 'Storage policy: "{{name}}" has been imported. 
\nID: {{id}}', }, }); }; diff --git a/ESSArch_Core/frontend/static/frontend/lang/sv/import.ts b/ESSArch_Core/frontend/static/frontend/lang/sv/import.ts index 6318b5786..4153a7d64 100644 --- a/ESSArch_Core/frontend/static/frontend/lang/sv/import.ts +++ b/ESSArch_Core/frontend/static/frontend/lang/sv/import.ts @@ -19,6 +19,9 @@ export default ($translateProvider: ng.translate.ITranslateProvider) => { SA_IMPORTED: 'Leveransöverenskommelse "{{name}}" har importerats. \nID: {{id}}', SA_IS_PUBLISHED_CANNOT_BE_OVERWRITTEN: 'Leveransöverenskommelse {{name}} är publicerad och kan inte skrivas över', + STORAGEPOLICY_EXISTS: 'Lagringsregelverk finns redan', + STORAGEPOLICY_EXISTS_DESC: 'Lagringsregelverk med samma ID finns redan. Vill du skriva över den?', + STORAGEPOLICY_IMPORTED: 'Lagringsregelverk: "{{name}}" har importerats. \nID: {{id}}', }, }); }; diff --git a/ESSArch_Core/frontend/static/frontend/scripts/components/ImportComponent.js b/ESSArch_Core/frontend/static/frontend/scripts/components/ImportComponent.js index bb2373318..bd0dcbfe8 100644 --- a/ESSArch_Core/frontend/static/frontend/scripts/components/ImportComponent.js +++ b/ESSArch_Core/frontend/static/frontend/scripts/components/ImportComponent.js @@ -13,6 +13,7 @@ export default { 'Notifications', '$uibModal', '$translate', + 'StoragePolicy', controller, ], controllerAs: 'vm', diff --git a/ESSArch_Core/frontend/static/frontend/scripts/controllers/ImportCtrl.js b/ESSArch_Core/frontend/static/frontend/scripts/controllers/ImportCtrl.js index 46b4cc06f..c6921aa4c 100644 --- a/ESSArch_Core/frontend/static/frontend/scripts/controllers/ImportCtrl.js +++ b/ESSArch_Core/frontend/static/frontend/scripts/controllers/ImportCtrl.js @@ -1,5 +1,5 @@ export default class ImportCtrl { - constructor($q, $rootScope, $scope, $http, IP, Profile, SA, Notifications, $uibModal, $translate) { + constructor($q, $rootScope, $scope, $http, IP, Profile, SA, Notifications, $uibModal, $translate, StoragePolicy) { const vm = this;
$scope.angular = angular; vm.loadingSas = false; @@ -70,6 +70,23 @@ export default class ImportCtrl { }); }) ); + } else if (key === 'policy') { + promises.push( + $http.get(vm.url + '/api/storage-policies/' + sa[key] + '/', {headers: headers}).then(function (response) { + const data = response.data; + return StoragePolicy.new(data) + .$promise.then(function (response) { + return response; + }) + .catch(function (response) { + vm.importingSa = false; + if (response.status == 409) { + storagePolicyExistsModal(data); + } + return response; + }); + }) + ); } else { } } @@ -198,6 +215,24 @@ export default class ImportCtrl { }); modalInstance.result.then(function (data) {}); } + function storagePolicyExistsModal(profile) { + const modalInstance = $uibModal.open({ + animation: true, + ariaLabelledBy: 'modal-title', + ariaDescribedBy: 'modal-body', + templateUrl: 'static/frontend/views/modals/storagePolicy-exists-modal.html', + controller: 'OverwriteModalInstanceCtrl', + controllerAs: '$ctrl', + resolve: { + data: function () { + return { + profile: profile, + }; + }, + }, + }); + modalInstance.result.then(function (data) {}); + } vm.triggerProfileUpload = function () { document.getElementById('profile-upload').click(); }; diff --git a/ESSArch_Core/frontend/static/frontend/scripts/controllers/OverwriteModalInstanceCtrl.js b/ESSArch_Core/frontend/static/frontend/scripts/controllers/OverwriteModalInstanceCtrl.js index ac0f11f77..478600089 100644 --- a/ESSArch_Core/frontend/static/frontend/scripts/controllers/OverwriteModalInstanceCtrl.js +++ b/ESSArch_Core/frontend/static/frontend/scripts/controllers/OverwriteModalInstanceCtrl.js @@ -1,5 +1,5 @@ export default class OverwriteModalInstanceCtrl { - constructor($uibModalInstance, data, Profile, SA, Notifications, $translate) { + constructor($uibModalInstance, data, Profile, SA, Notifications, $translate, StoragePolicy) { const $ctrl = this; if (data.file) { $ctrl.file = data.file; @@ -20,6 +20,18 @@ export default class 
OverwriteModalInstanceCtrl { return resource; }); }; + $ctrl.overwriteStoragePolicy = function () { + return StoragePolicy.update($ctrl.profile).$promise.then(function (resource) { + Notifications.add($translate.instant('IMPORT.STORAGEPOLICY_IMPORTED', resource), 'success', 5000, { + isHtml: true, + }); + $ctrl.data = { + status: 'overwritten', + }; + $uibModalInstance.close($ctrl.data); + return resource; + }); + }; $ctrl.overwriteSa = function () { return SA.update($ctrl.profile) .$promise.then(function (resource) { diff --git a/ESSArch_Core/frontend/static/frontend/scripts/modules/essarch.controllers.module.js b/ESSArch_Core/frontend/static/frontend/scripts/modules/essarch.controllers.module.js index 269918d70..ab12475ca 100644 --- a/ESSArch_Core/frontend/static/frontend/scripts/modules/essarch.controllers.module.js +++ b/ESSArch_Core/frontend/static/frontend/scripts/modules/essarch.controllers.module.js @@ -784,6 +784,7 @@ export default angular 'SA', 'Notifications', '$translate', + 'StoragePolicy', OverwriteModalInstanceCtrl, ]) .controller('PlaceNodeInArchiveModalInstanceCtrl', [ diff --git a/ESSArch_Core/frontend/static/frontend/scripts/services/storagePolicy.js b/ESSArch_Core/frontend/static/frontend/scripts/services/storagePolicy.js index 9c443885e..000544dd4 100644 --- a/ESSArch_Core/frontend/static/frontend/scripts/services/storagePolicy.js +++ b/ESSArch_Core/frontend/static/frontend/scripts/services/storagePolicy.js @@ -1,3 +1,19 @@ export default ($resource, appConfig) => { - return $resource(appConfig.djangoUrl + 'storage-policies/:id/:action/', {id: '@id'}, {}); + return $resource( + appConfig.djangoUrl + 'storage-policies/:id/:action/', + {}, + { + get: { + method: 'GET', + params: {id: '@id'}, + }, + new: { + method: 'POST', + }, + update: { + method: 'PUT', + params: {id: '@id'}, + }, + } + ); }; diff --git a/ESSArch_Core/frontend/static/frontend/views/modals/storagePolicy-exists-modal.html 
b/ESSArch_Core/frontend/static/frontend/views/modals/storagePolicy-exists-modal.html new file mode 100644 index 000000000..261d98c92 --- /dev/null +++ b/ESSArch_Core/frontend/static/frontend/views/modals/storagePolicy-exists-modal.html @@ -0,0 +1,19 @@ + +
+ + +
diff --git a/ESSArch_Core/ip/models.py b/ESSArch_Core/ip/models.py index 5ddd67fe1..9ee55fee4 100644 --- a/ESSArch_Core/ip/models.py +++ b/ESSArch_Core/ip/models.py @@ -1716,12 +1716,21 @@ def get_temp_container_aic_xml_path(self): @retry(retry=retry_if_exception_type(RequestException), reraise=True, stop=stop_after_attempt(5), wait=wait_fixed(60), before_sleep=before_sleep_log(logger, logging.DEBUG)) def update_remote_ip(self, host, session): - from ESSArch_Core.ip.serializers import InformationPackageFromMasterSerializer + from ESSArch_Core.ip.serializers import ( + InformationPackageFromMasterSerializer, + ) remote_ip = urljoin(host, reverse('informationpackage-add-from-master')) data = InformationPackageFromMasterSerializer(instance=self).data - response = session.post(remote_ip, json=data, timeout=10) - response.raise_for_status() + response = None + try: + response = session.post(remote_ip, json=data, timeout=10) + response.raise_for_status() + except RequestException as e: + msg = 'Response: {response}, post_url: {post_url}, post_data: {post_data}'.format( + response=e.response.text, post_url=remote_ip, post_data=data) + logger.error(msg) + raise e @retry(retry=retry_if_exception_type(StorageMediumFull), reraise=True, stop=stop_after_attempt(2), wait=wait_fixed(60), before_sleep=before_sleep_log(logger, logging.DEBUG)) @@ -1759,6 +1768,8 @@ def preserve(self, src: list, storage_target, container: bool, task): task.result = remote_data['result'] task.traceback = remote_data['traceback'] task.exception = remote_data['exception'] + if task.status == 'SUCCESS': + storage_object = StorageObject.create_from_remote_copy(host, session, task.result) task.save() if task.status != celery_states.SUCCESS: @@ -1773,6 +1784,8 @@ def preserve(self, src: list, storage_target, container: bool, task): task.result = remote_data['result'] task.traceback = remote_data['traceback'] task.exception = remote_data['exception'] + if task.status == 'SUCCESS': + storage_object = 
StorageObject.create_from_remote_copy(host, session, task.result) task.save() sleep(5) @@ -1780,7 +1793,6 @@ def preserve(self, src: list, storage_target, container: bool, task): if task.status in celery_states.EXCEPTION_STATES: task.reraise() - storage_object = StorageObject.create_from_remote_copy(host, session, task.result) else: storage_medium, created = storage_target.get_or_create_storage_medium(qs=qs) diff --git a/ESSArch_Core/ip/serializers.py b/ESSArch_Core/ip/serializers.py index cac363c42..39606f727 100644 --- a/ESSArch_Core/ip/serializers.py +++ b/ESSArch_Core/ip/serializers.py @@ -31,11 +31,6 @@ SubmissionAgreementIPDataSerializer, ) from ESSArch_Core.profiles.utils import fill_specification_data, profile_types -from ESSArch_Core.storage.models import ( - StorageMethod, - StorageMethodTargetRelation, - StorageTarget, -) from ESSArch_Core.tags.models import ( Delivery, Structure, @@ -506,29 +501,19 @@ class Meta: class InformationPackageFromMasterSerializer(serializers.ModelSerializer): aic = InformationPackageAICSerializer(omit=['information_packages']) + organization = serializers.SerializerMethodField() + submission_agreement = serializers.PrimaryKeyRelatedField( + queryset=SubmissionAgreement.objects.all(), + pk_field=serializers.UUIDField(format='hex_verbose'), + ) - def create_storage_method(self, data): - storage_method_target_set_data = data.pop('storage_method_target_relations') - storage_method, _ = StorageMethod.objects.update_or_create( - id=data['id'], - defaults=data - ) - - for storage_method_target_data in storage_method_target_set_data: - storage_target_data = storage_method_target_data.pop('storage_target') - storage_target_data.pop('remote_server', None) - storage_target, _ = StorageTarget.objects.update_or_create( - id=storage_target_data['id'], - defaults=storage_target_data - ) - storage_method_target_data['storage_method'] = storage_method - storage_method_target_data['storage_target'] = storage_target - storage_method_target, _ = 
StorageMethodTargetRelation.objects.update_or_create( - id=storage_method_target_data['id'], - defaults=storage_method_target_data - ) - - return storage_method + def get_organization(self, obj): + try: + return GroupSerializer(obj.org[0].group).data + except AttributeError: + return GroupSerializer(obj.generic_groups.first().group).data + except IndexError: + return None def create(self, validated_data): aic_data = validated_data.pop('aic') @@ -553,12 +538,14 @@ class Meta: fields = ( 'id', 'label', 'object_identifier_value', 'object_size', 'object_path', 'package_type', 'responsible', 'create_date', - 'object_num_items', 'entry_date', 'state', 'status', 'step_state', + 'object_num_items', 'entry_date', 'state', 'archived', 'cached', 'aic', 'generation', 'message_digest', 'message_digest_algorithm', 'content_mets_create_date', 'content_mets_size', 'content_mets_digest_algorithm', 'content_mets_digest', 'package_mets_create_date', 'package_mets_size', 'package_mets_digest_algorithm', 'package_mets_digest', - 'start_date', 'end_date', 'appraisal_date', + 'start_date', 'end_date', 'appraisal_date', 'submission_agreement', 'organization', + # 'start_date', 'end_date', 'appraisal_date', 'profiles', 'policy', 'organization', 'submission_agreement', + # 'submission_agreement_locked', 'submission_agreement_data', 'submission_agreement_data_versions', ) extra_kwargs = { 'id': { @@ -570,6 +557,7 @@ class Meta: 'validators': [], }, } + validators = [] # Remove a default "unique together" constraint. 
class NestedInformationPackageSerializer(InformationPackageSerializer): diff --git a/ESSArch_Core/ip/views.py b/ESSArch_Core/ip/views.py index c3fdec5a7..78a430f64 100644 --- a/ESSArch_Core/ip/views.py +++ b/ESSArch_Core/ip/views.py @@ -1235,6 +1235,9 @@ def get_xsds(self, request, pk=None): @transaction.atomic @action(detail=False, methods=['post'], url_path='add-from-master') def add_from_master(self, request, pk=None): + self.logger.debug( + 'ip - views.py - add_from_master - request.data: %s' % (repr(request.data)) + ) serializer = InformationPackageFromMasterSerializer( data=request.data, context={'request': request}, ) diff --git a/ESSArch_Core/profiles/admin.py b/ESSArch_Core/profiles/admin.py index 3b74d887f..27629229e 100644 --- a/ESSArch_Core/profiles/admin.py +++ b/ESSArch_Core/profiles/admin.py @@ -57,7 +57,7 @@ def render_change_form(self, request, context, *args, **kwargs): fieldsets = ( (None, { 'classes': ('wide'), - 'fields': ('id', 'name', 'type', 'status', 'label', 'template',) + 'fields': ('id', 'name', 'type', 'status', 'label', 'policy', 'template',) }), ('Information about Archival organization', { 'classes': ('collapse', 'wide'), diff --git a/ESSArch_Core/profiles/migrations/0057_auto_20210304_1357.py b/ESSArch_Core/profiles/migrations/0057_auto_20210304_1357.py new file mode 100644 index 000000000..3141d8ff0 --- /dev/null +++ b/ESSArch_Core/profiles/migrations/0057_auto_20210304_1357.py @@ -0,0 +1,21 @@ +# Generated by Django 3.1.2 on 2021-03-04 12:57 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('configuration', '0024_auto_20200309_1535'), + ('profiles', '0056_auto_20201014_1920'), + ] + + operations = [ + migrations.AlterField( + model_name='submissionagreement', + name='policy', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='submission_agreements', + to='configuration.storagepolicy', 
verbose_name='storage policy'), + ), + ] diff --git a/ESSArch_Core/profiles/models.py b/ESSArch_Core/profiles/models.py index 8435ce7bd..0ea8799aa 100644 --- a/ESSArch_Core/profiles/models.py +++ b/ESSArch_Core/profiles/models.py @@ -199,6 +199,7 @@ class SubmissionAgreement(models.Model): 'configuration.StoragePolicy', on_delete=models.PROTECT, related_name='submission_agreements', + verbose_name=_('storage policy'), ) include_profile_transfer_project = models.BooleanField(default=False) include_profile_content_type = models.BooleanField(default=False) diff --git a/ESSArch_Core/storage/models.py b/ESSArch_Core/storage/models.py index 4432aeed3..523156f92 100644 --- a/ESSArch_Core/storage/models.py +++ b/ESSArch_Core/storage/models.py @@ -286,7 +286,7 @@ def save(self, *args, **kwargs): if StorageMethodTargetRelation.objects.filter( storage_method=self.storage_method, status=STORAGE_TARGET_STATUS_ENABLED, - ).exists(): + ).count() > 1: raise ValidationError(_('Only 1 target can be enabled for a storage method at a time'),) return super().save(*args, **kwargs) @@ -586,7 +586,10 @@ def create_from_remote_copy(cls, host, session, object_id): data = r.json() data.pop('location_status_display', None) data.pop('status_display', None) - data['storage_target_id'] = data.pop('storage_target') + if data.get('storage_target') is not None: + data['storage_target'] = StorageTarget.objects.get( + pk=data['storage_target'].pop('id'), + ) if data.get('tape_drive') is not None: data['tape_drive'] = TapeDrive.create_from_remote_copy( host, session, data['tape_drive'], create_storage_medium=False @@ -851,7 +854,7 @@ def read(self, dst, task, extract=False): # by master to write to its temp directory temp_dir = Path.objects.get(entity='temp').value - user, passw, host = storage_target.master_server.split(',') + host, user, passw = storage_target.master_server.split(',') session = requests.Session() session.verify = settings.REQUESTS_VERIFY session.auth = (user, passw)